Index: build.properties =================================================================== --- build.properties (revision 1426896) +++ build.properties (working copy) @@ -79,7 +79,7 @@ # (measured in milliseconds). Ignored if fork is disabled. When running # multiple tests inside the same Java VM (see forkMode), timeout # applies to the time that all tests use together, not to an individual test. -test.junit.timeout=43200000 +test.junit.timeout=432000000 # Use this property to selectively disable tests from the command line: # ant test -Dtest.junit.exclude="**/TestCliDriver.class" Index: metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java =================================================================== --- metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java (revision 1426896) +++ metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java (working copy) @@ -1286,7 +1286,7 @@ return null; } return new Partition(mpart.getValues(), dbName, tblName, mpart.getCreateTime(), - mpart.getLastAccessTime(), convertToStorageDescriptor(mpart.getSd(), true), + mpart.getLastAccessTime(), convertToStorageDescriptor(mpart.getSd(), false), mpart.getParameters()); } @@ -1681,7 +1681,7 @@ query.setOrdering("partitionName ascending"); List mparts = (List) query.executeWithMap(params); - // pm.retrieveAll(mparts); // retrieveAll is pessimistic. some fields may not be needed + pm.retrieveAll(mparts); // retrieveAll is pessimistic but guarantees every field is fetched before conversion 
List results = convertToParts(dbName, tblName, mparts); // pm.makeTransientAll(mparts); // makeTransient will prohibit future access of unfetched fields query.closeAll(); Index: metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java =================================================================== --- metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java (revision 1426896) +++ metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java (working copy) @@ -237,7 +237,7 @@ String lib = part.getSd().getSerdeInfo().getSerializationLib(); try { Deserializer deserializer = SerDeUtils.lookupDeserializer(lib); - deserializer.initialize(conf, MetaStoreUtils.getSchema(part, table)); + deserializer.initialize(conf, MetaStoreUtils.getPartitionSchema(part, table)); return deserializer; } catch (RuntimeException e) { throw e; @@ -497,6 +497,15 @@ .getParameters(), table.getDbName(), table.getTableName(), table.getPartitionKeys()); } + public static Properties getPartitionSchema( + org.apache.hadoop.hive.metastore.api.Partition partition, + org.apache.hadoop.hive.metastore.api.Table table) { + return MetaStoreUtils + .getSchema(partition.getSd(), partition.getSd(), partition + .getParameters(), partition.getDbName(), partition.getTableName(), + table.getPartitionKeys()); + } + public static Properties getSchema( org.apache.hadoop.hive.metastore.api.Partition part, org.apache.hadoop.hive.metastore.api.Table table) { Index: common/src/java/org/apache/hadoop/hive/common/ObjectPair.java =================================================================== --- common/src/java/org/apache/hadoop/hive/common/ObjectPair.java (revision 0) +++ common/src/java/org/apache/hadoop/hive/common/ObjectPair.java (working copy) @@ -0,0 +1,47 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.common; + +public class ObjectPair { + private F first; + private S second; + + public ObjectPair() {} + + public ObjectPair(F first, S second) { + this.first = first; + this.second = second; + } + + public F getFirst() { + return first; + } + + public void setFirst(F first) { + this.first = first; + } + + public S getSecond() { + return second; + } + + public void setSecond(S second) { + this.second = second; + } +} Index: serde/src/java/org/apache/hadoop/hive/serde2/NullStructSerDe.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/NullStructSerDe.java (revision 1426896) +++ serde/src/java/org/apache/hadoop/hive/serde2/NullStructSerDe.java (working copy) @@ -56,32 +56,11 @@ return null; } + private static ObjectInspector nullStructOI = new NullStructSerDeObjectInspector(); + @Override public ObjectInspector getObjectInspector() throws SerDeException { - return new StructObjectInspector() { - public String getTypeName() { - return "null"; - } - public Category getCategory() { - return Category.PRIMITIVE; - } - @Override - public StructField getStructFieldRef(String fieldName) { - return null; - } - @Override - public List getAllStructFieldRefs() { - return new 
ArrayList(); - } - @Override - public Object getStructFieldData(Object data, StructField fieldRef) { - return null; - } - @Override - public List getStructFieldsDataAsList(Object data) { - return new ArrayList(); - } - }; + return nullStructOI; } @Override @@ -103,4 +82,38 @@ return NullWritable.get(); } + + /** + * An object inspector for null struct serde. + */ + public static class NullStructSerDeObjectInspector extends StructObjectInspector { + public String getTypeName() { + return "null"; + } + + public Category getCategory() { + return Category.PRIMITIVE; + } + + @Override + public StructField getStructFieldRef(String fieldName) { + return null; + } + + @Override + public List getAllStructFieldRefs() { + return new ArrayList(); + } + + @Override + public Object getStructFieldData(Object data, StructField fieldRef) { + return null; + } + + @Override + public List getStructFieldsDataAsList(Object data) { + return new ArrayList(); + } + } + } Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/SettableStructObjectInspector.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/SettableStructObjectInspector.java (revision 1426896) +++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/SettableStructObjectInspector.java (working copy) @@ -34,4 +34,9 @@ */ public abstract Object setStructFieldData(Object struct, StructField field, Object fieldValue); + + @Override + public boolean isSettable() { + return true; + } } Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StructObjectInspector.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StructObjectInspector.java (revision 1426896) +++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StructObjectInspector.java (working copy) @@ -47,6 +47,10 @@ */ public abstract List 
getStructFieldsDataAsList(Object data); + public boolean isSettable() { + return false; + } + @Override public String toString() { StringBuilder sb = new StringBuilder(); Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java (revision 1426896) +++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java (working copy) @@ -22,10 +22,11 @@ import java.util.List; import java.util.Map; +import org.apache.hadoop.hive.common.ObjectPair; import org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaStringObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableBinaryObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableBooleanObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableBinaryObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableByteObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableDoubleObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableFloatObjectInspector; @@ -33,8 +34,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableLongObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableShortObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableTimestampObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableStringObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector; /** 
* ObjectInspectorConverters. @@ -59,6 +60,61 @@ } } + private static Converter getConverter(PrimitiveObjectInspector inputOI, + PrimitiveObjectInspector outputOI) { + switch (outputOI.getPrimitiveCategory()) { + case BOOLEAN: + return new PrimitiveObjectInspectorConverter.BooleanConverter( + inputOI, + (SettableBooleanObjectInspector) outputOI); + case BYTE: + return new PrimitiveObjectInspectorConverter.ByteConverter( + inputOI, + (SettableByteObjectInspector) outputOI); + case SHORT: + return new PrimitiveObjectInspectorConverter.ShortConverter( + inputOI, + (SettableShortObjectInspector) outputOI); + case INT: + return new PrimitiveObjectInspectorConverter.IntConverter( + inputOI, + (SettableIntObjectInspector) outputOI); + case LONG: + return new PrimitiveObjectInspectorConverter.LongConverter( + inputOI, + (SettableLongObjectInspector) outputOI); + case FLOAT: + return new PrimitiveObjectInspectorConverter.FloatConverter( + inputOI, + (SettableFloatObjectInspector) outputOI); + case DOUBLE: + return new PrimitiveObjectInspectorConverter.DoubleConverter( + inputOI, + (SettableDoubleObjectInspector) outputOI); + case STRING: + if (outputOI instanceof WritableStringObjectInspector) { + return new PrimitiveObjectInspectorConverter.TextConverter( + inputOI); + } else if (outputOI instanceof JavaStringObjectInspector) { + return new PrimitiveObjectInspectorConverter.StringConverter( + inputOI); + } + case TIMESTAMP: + return new PrimitiveObjectInspectorConverter.TimestampConverter( + inputOI, + (SettableTimestampObjectInspector) outputOI); + case BINARY: + return new PrimitiveObjectInspectorConverter.BinaryConverter( + inputOI, + (SettableBinaryObjectInspector)outputOI); + + default: + throw new RuntimeException("Hive internal error: conversion of " + + inputOI.getTypeName() + " to " + outputOI.getTypeName() + + " not supported yet."); + } + } + /** * Returns a converter that converts objects from one OI to another OI. 
The * returned (converted) object belongs to this converter, so that it can be @@ -73,57 +129,7 @@ } switch (outputOI.getCategory()) { case PRIMITIVE: - switch (((PrimitiveObjectInspector) outputOI).getPrimitiveCategory()) { - case BOOLEAN: - return new PrimitiveObjectInspectorConverter.BooleanConverter( - (PrimitiveObjectInspector) inputOI, - (SettableBooleanObjectInspector) outputOI); - case BYTE: - return new PrimitiveObjectInspectorConverter.ByteConverter( - (PrimitiveObjectInspector) inputOI, - (SettableByteObjectInspector) outputOI); - case SHORT: - return new PrimitiveObjectInspectorConverter.ShortConverter( - (PrimitiveObjectInspector) inputOI, - (SettableShortObjectInspector) outputOI); - case INT: - return new PrimitiveObjectInspectorConverter.IntConverter( - (PrimitiveObjectInspector) inputOI, - (SettableIntObjectInspector) outputOI); - case LONG: - return new PrimitiveObjectInspectorConverter.LongConverter( - (PrimitiveObjectInspector) inputOI, - (SettableLongObjectInspector) outputOI); - case FLOAT: - return new PrimitiveObjectInspectorConverter.FloatConverter( - (PrimitiveObjectInspector) inputOI, - (SettableFloatObjectInspector) outputOI); - case DOUBLE: - return new PrimitiveObjectInspectorConverter.DoubleConverter( - (PrimitiveObjectInspector) inputOI, - (SettableDoubleObjectInspector) outputOI); - case STRING: - if (outputOI instanceof WritableStringObjectInspector) { - return new PrimitiveObjectInspectorConverter.TextConverter( - (PrimitiveObjectInspector) inputOI); - } else if (outputOI instanceof JavaStringObjectInspector) { - return new PrimitiveObjectInspectorConverter.StringConverter( - (PrimitiveObjectInspector) inputOI); - } - case TIMESTAMP: - return new PrimitiveObjectInspectorConverter.TimestampConverter( - (PrimitiveObjectInspector) inputOI, - (SettableTimestampObjectInspector) outputOI); - case BINARY: - return new PrimitiveObjectInspectorConverter.BinaryConverter( - (PrimitiveObjectInspector)inputOI, - 
(SettableBinaryObjectInspector)outputOI); - - default: - throw new RuntimeException("Hive internal error: conversion of " - + inputOI.getTypeName() + " to " + outputOI.getTypeName() - + " not supported yet."); - } + return getConverter((PrimitiveObjectInspector) inputOI, (PrimitiveObjectInspector) outputOI); case STRUCT: return new StructConverter(inputOI, (SettableStructObjectInspector) outputOI); @@ -140,7 +146,107 @@ } } + public static ObjectInspector getConverteredOI( + ObjectInspector inputOI, + ObjectInspector outputOI) { + // If the inputOI is the same as the outputOI, just return it + if (inputOI == outputOI) { + return outputOI; + } + switch (outputOI.getCategory()) { + case PRIMITIVE: + return outputOI; + case STRUCT: + StructObjectInspector structOutputOI = (StructObjectInspector) outputOI; + if (structOutputOI.isSettable()) { + return outputOI; + } + else { + // create a standard settable struct object inspector + List listFields = structOutputOI.getAllStructFieldRefs(); + List structFieldNames = new ArrayList(listFields.size()); + List structFieldObjectInspectors = new ArrayList( + listFields.size()); + + for (StructField listField : listFields) { + structFieldNames.add(listField.getFieldName()); + structFieldObjectInspectors.add(listField.getFieldObjectInspector()); + } + + StandardStructObjectInspector structStandardOutputOI = ObjectInspectorFactory + .getStandardStructObjectInspector( + structFieldNames, + structFieldObjectInspectors); + return structStandardOutputOI; + } + case LIST: + return outputOI; + case MAP: + return outputOI; + default: + throw new RuntimeException("Hive internal error: conversion of " + + inputOI.getTypeName() + " to " + outputOI.getTypeName() + + " not supported yet."); + } + } + /** + * Returns a converter that converts objects from one OI to another OI. The + * returned (converted) object belongs to this converter, so that it can be + * reused across different calls. 
+ */ + public static ObjectPair getConverterOutputOI( + ObjectInspector inputOI, + ObjectInspector outputOI, + boolean identityConverterOK) { + // If the inputOI is the same as the outputOI, just return an + // IdentityConverter. + if (identityConverterOK && (inputOI == outputOI)) { + return new ObjectPair(outputOI, new IdentityConverter()); + } + switch (outputOI.getCategory()) { + case PRIMITIVE: + return new ObjectPair(outputOI, getConverter( + (PrimitiveObjectInspector) inputOI, (PrimitiveObjectInspector) outputOI)); + case STRUCT: + StructObjectInspector structOutputOI = (StructObjectInspector) outputOI; + if (structOutputOI.isSettable()) { + return new ObjectPair(outputOI, + (Converter) (new StructConverter(inputOI, (SettableStructObjectInspector) outputOI))); + } + else { + // create a standard settable struct object inspector + List listFields = structOutputOI.getAllStructFieldRefs(); + List structFieldNames = new ArrayList(listFields.size()); + List structFieldObjectInspectors = new ArrayList( + listFields.size()); + + for (StructField listField : listFields) { + structFieldNames.add(listField.getFieldName()); + structFieldObjectInspectors.add(listField.getFieldObjectInspector()); + } + + StandardStructObjectInspector structStandardOutputOI = ObjectInspectorFactory + .getStandardStructObjectInspector( + structFieldNames, + structFieldObjectInspectors); + return new ObjectPair(structStandardOutputOI, + getConverter(inputOI, structStandardOutputOI)); + } + case LIST: + return new ObjectPair(outputOI, new ListConverter(inputOI, + (SettableListObjectInspector) outputOI)); + case MAP: + return new ObjectPair(outputOI, new MapConverter(inputOI, + (SettableMapObjectInspector) outputOI)); + default: + throw new RuntimeException("Hive internal error: conversion of " + + inputOI.getTypeName() + " to " + outputOI.getTypeName() + + " not supported yet."); + } + } + + /** * A converter class for List. 
*/ public static class ListConverter implements Converter { @@ -221,10 +327,11 @@ this.outputOI = outputOI; inputFields = this.inputOI.getAllStructFieldRefs(); outputFields = outputOI.getAllStructFieldRefs(); - assert (inputFields.size() == outputFields.size()); - fieldConverters = new ArrayList(inputFields.size()); - for (int f = 0; f < inputFields.size(); f++) { + // If the output has some extra fields, set them to NULL. + int minFields = Math.min(inputFields.size(), outputFields.size()); + fieldConverters = new ArrayList(minFields); + for (int f = 0; f < minFields; f++) { fieldConverters.add(getConverter(inputFields.get(f) .getFieldObjectInspector(), outputFields.get(f) .getFieldObjectInspector())); @@ -243,15 +350,19 @@ return null; } + int minFields = Math.min(inputFields.size(), outputFields.size()); // Convert the fields - for (int f = 0; f < inputFields.size(); f++) { - Object inputFieldValue = inputOI.getStructFieldData(input, inputFields - .get(f)); - Object outputFieldValue = fieldConverters.get(f).convert( - inputFieldValue); - outputOI.setStructFieldData(output, outputFields.get(f), - outputFieldValue); + for (int f = 0; f < minFields; f++) { + Object inputFieldValue = inputOI.getStructFieldData(input, inputFields.get(f)); + Object outputFieldValue = fieldConverters.get(f).convert(inputFieldValue); + outputOI.setStructFieldData(output, outputFields.get(f), outputFieldValue); } + + // set the extra fields to null + for (int f = minFields; f < outputFields.size(); f++) { + outputOI.setStructFieldData(output, outputFields.get(f), null); + } + return output; } } Index: ql/src/test/results/clientpositive/bucketmapjoin5.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin5.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucketmapjoin5.q.out (working copy) @@ -253,7 +253,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part numFiles 4 - 
numPartitions 2 numRows 0 partition_columns ds rawDataSize 0 @@ -301,7 +300,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part numFiles 4 - numPartitions 2 numRows 0 partition_columns ds rawDataSize 0 @@ -809,7 +807,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 numFiles 2 - numPartitions 2 numRows 0 partition_columns ds rawDataSize 0 @@ -857,7 +854,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 numFiles 2 - numPartitions 2 numRows 0 partition_columns ds rawDataSize 0 Index: ql/src/test/results/clientpositive/pcr.q.out =================================================================== --- ql/src/test/results/clientpositive/pcr.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/pcr.q.out (working copy) @@ -123,7 +123,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 @@ -169,7 +168,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 @@ -321,7 +319,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 @@ -367,7 +364,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 @@ -413,7 +409,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 @@ -609,7 +604,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 @@ -655,7 +649,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 @@ -815,7 +808,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 3 numRows 
20 partition_columns ds rawDataSize 160 @@ -861,7 +853,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 @@ -1023,7 +1014,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 @@ -1069,7 +1059,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 @@ -1115,7 +1104,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 @@ -1288,7 +1276,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 @@ -1334,7 +1321,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 @@ -1380,7 +1366,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 @@ -1557,7 +1542,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 @@ -1603,7 +1587,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 @@ -1742,7 +1725,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 @@ -1788,7 +1770,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 @@ -1967,7 +1948,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 @@ -2013,7 +1993,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - 
numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 @@ -2059,7 +2038,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 @@ -2272,7 +2250,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 @@ -2318,7 +2295,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 @@ -2475,7 +2451,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 @@ -2751,7 +2726,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 @@ -2797,7 +2771,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 @@ -3088,7 +3061,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 4 numRows 20 partition_columns ds rawDataSize 160 @@ -3134,7 +3106,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 4 numRows 20 partition_columns ds rawDataSize 160 @@ -3180,7 +3151,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 4 numRows 20 partition_columns ds rawDataSize 160 @@ -3226,7 +3196,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 4 numRows 20 partition_columns ds rawDataSize 160 @@ -3428,7 +3397,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 4 numRows 20 partition_columns ds rawDataSize 160 @@ -3474,7 +3442,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 4 numRows 20 partition_columns ds rawDataSize 160 @@ -3520,7 +3487,6 @@ #### A masked pattern was here #### name 
default.pcr_t1 numFiles 1 - numPartitions 4 numRows 20 partition_columns ds rawDataSize 160 @@ -3794,7 +3760,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 4 numRows 20 partition_columns ds rawDataSize 160 @@ -4352,7 +4317,6 @@ #### A masked pattern was here #### name default.pcr_t1 numFiles 1 - numPartitions 4 numRows 20 partition_columns ds rawDataSize 160 @@ -4906,7 +4870,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -5077,7 +5040,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -5124,7 +5086,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -5301,7 +5262,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -5348,7 +5308,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 Index: ql/src/test/results/clientpositive/load_dyn_part8.q.out =================================================================== --- ql/src/test/results/clientpositive/load_dyn_part8.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/load_dyn_part8.q.out (working copy) @@ -157,7 +157,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -204,7 +203,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -251,7 +249,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -298,7 +295,6 @@ #### A masked pattern was here #### name 
default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 Index: ql/src/test/results/clientpositive/sample9.q.out =================================================================== --- ql/src/test/results/clientpositive/sample9.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/sample9.q.out (working copy) @@ -72,16 +72,10 @@ columns key,value columns.types int:string #### A masked pattern was here #### - name default.srcbucket - numFiles 2 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket { i32 key, string value} + name null.null + serialization.ddl struct null { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11603 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -104,7 +98,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket - name: default.srcbucket + name: null.null Truncated Path -> Alias: /srcbucket/srcbucket0.txt [s:a] Index: ql/src/test/results/clientpositive/sample4.q.out =================================================================== --- ql/src/test/results/clientpositive/sample4.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/sample4.q.out (working copy) @@ -87,16 +87,10 @@ columns key,value columns.types int:string #### A masked pattern was here #### - name default.srcbucket - numFiles 2 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket { i32 key, string value} + name null.null + serialization.ddl struct null { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11603 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: 
org.apache.hadoop.mapred.TextInputFormat @@ -119,7 +113,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket - name: default.srcbucket + name: null.null Truncated Path -> Alias: /srcbucket/srcbucket0.txt [s] Index: ql/src/test/results/clientpositive/groupby_sort_1.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_sort_1.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/groupby_sort_1.q.out (working copy) @@ -129,22 +129,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t1 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -168,7 +161,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 - name: default.t1 + name: null.null Truncated Path -> Alias: /t1 [t1] @@ -460,22 +453,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t1 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1 { string key, string val} + name null.null + 
serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -499,7 +485,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 - name: default.t1 + name: null.null Reduce Operator Tree: Group By Operator aggregations: @@ -732,22 +718,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t1 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -771,7 +750,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 - name: default.t1 + name: null.null Truncated Path -> Alias: /t1 [subq1:t1] @@ -1137,22 +1116,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t1 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1 { string key, string val} + name null.null + serialization.ddl 
struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1176,7 +1148,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 - name: default.t1 + name: null.null Truncated Path -> Alias: /t1 [subq1:t1] @@ -1562,22 +1534,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t1 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1601,7 +1566,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 - name: default.t1 + name: null.null Truncated Path -> Alias: /t1 [t1] @@ -1939,22 +1904,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t1 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1 { string key, string val} + name null.null + serialization.ddl struct null { string key, string 
val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1978,7 +1936,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 - name: default.t1 + name: null.null Reduce Operator Tree: Group By Operator aggregations: @@ -2215,22 +2173,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t1 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -2254,7 +2205,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 - name: default.t1 + name: null.null Reduce Operator Tree: Group By Operator aggregations: @@ -2530,22 +2481,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t1 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1 { string key, string val} + name null.null + serialization.ddl struct null { string key, string 
val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -2569,7 +2513,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 - name: default.t1 + name: null.null Reduce Operator Tree: Group By Operator aggregations: @@ -2937,22 +2881,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t1 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -2976,7 +2913,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 - name: default.t1 + name: null.null Truncated Path -> Alias: /t1 [null-subquery1:subq1-subquery1:t1, null-subquery2:subq1-subquery2:t1] @@ -3376,22 +3313,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t1 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1 { string key, string val} + name null.null + 
serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -3415,7 +3345,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 - name: default.t1 + name: null.null Reduce Operator Tree: Group By Operator aggregations: @@ -3603,22 +3533,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t1 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -3642,7 +3565,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 - name: default.t1 + name: null.null Truncated Path -> Alias: /t1 [null-subquery1:subq1-subquery1:t1] #### A masked pattern was here #### @@ -4089,22 +4012,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t1 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct 
t1 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -4128,7 +4044,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 - name: default.t1 + name: null.null Reduce Operator Tree: Join Operator condition map: @@ -4410,22 +4326,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t1 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -4449,7 +4358,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 - name: default.t1 + name: null.null Reduce Operator Tree: Group By Operator aggregations: @@ -4574,22 +4483,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t1 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1 
{ string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -4613,7 +4515,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 - name: default.t1 + name: null.null Reduce Operator Tree: Join Operator condition map: @@ -4832,22 +4734,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t2 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t2 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -4871,7 +4766,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t2 - name: default.t2 + name: null.null Reduce Operator Tree: Group By Operator aggregations: @@ -5196,22 +5091,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t2 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t2 { 
string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -5235,7 +5123,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t2 - name: default.t2 + name: null.null Truncated Path -> Alias: /t2 [t2] @@ -5740,22 +5628,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t2 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t2 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -5779,7 +5660,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t2 - name: default.t2 + name: null.null Truncated Path -> Alias: /t2 [t2] @@ -6236,22 +6117,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t2 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t2 { string key, string val} + name 
null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -6275,7 +6149,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t2 - name: default.t2 + name: null.null Truncated Path -> Alias: /t2 [subq:t2] @@ -6794,22 +6668,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t2 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t2 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -6833,7 +6700,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t2 - name: default.t2 + name: null.null Truncated Path -> Alias: /t2 [subq2:subq:t2] Index: ql/src/test/results/clientpositive/filter_join_breaktask.q.out =================================================================== --- ql/src/test/results/clientpositive/filter_join_breaktask.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/filter_join_breaktask.q.out (working copy) @@ -101,7 +101,6 @@ #### A masked pattern was here #### name default.filter_join_breaktask numFiles 1 - numPartitions 1 numRows 25 partition_columns ds rawDataSize 211 
@@ -231,7 +230,6 @@ #### A masked pattern was here #### name default.filter_join_breaktask numFiles 1 - numPartitions 1 numRows 25 partition_columns ds rawDataSize 211 Index: ql/src/test/results/clientpositive/join17.q.out =================================================================== --- ql/src/test/results/clientpositive/join17.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/join17.q.out (working copy) @@ -73,16 +73,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -104,7 +98,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/sort_merge_join_desc_5.q.out =================================================================== --- ql/src/test/results/clientpositive/sort_merge_join_desc_5.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/sort_merge_join_desc_5.q.out (working copy) @@ -120,7 +120,6 @@ partition values: part 1 properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 1 bucket_field_name key columns key,value @@ -128,7 +127,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 numFiles 1 - numPartitions 1 numRows 500 partition_columns part rawDataSize 5312 Index: ql/src/test/results/clientpositive/input_part9.q.out =================================================================== --- 
ql/src/test/results/clientpositive/input_part9.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/input_part9.q.out (working copy) @@ -71,7 +71,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -118,7 +117,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 Index: ql/src/test/results/clientpositive/join35.q.out =================================================================== --- ql/src/test/results/clientpositive/join35.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/join35.q.out (working copy) @@ -92,16 +92,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -123,7 +117,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Reduce Operator Tree: Group By Operator aggregations: @@ -583,16 +577,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -614,7 +602,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Reduce Operator Tree: Group By Operator aggregations: Index: ql/src/test/results/clientpositive/ppd_join_filter.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_join_filter.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/ppd_join_filter.q.out (working copy) @@ -80,16 +80,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -111,7 +105,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Reduce Operator Tree: Group By Operator aggregations: @@ -220,16 +214,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ 
-251,7 +239,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Reduce Operator Tree: Join Operator condition map: @@ -412,16 +400,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -443,7 +425,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Reduce Operator Tree: Group By Operator aggregations: @@ -552,16 +534,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -583,7 +559,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Reduce Operator Tree: Join Operator condition map: @@ -744,16 +720,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 
- serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -775,7 +745,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Reduce Operator Tree: Group By Operator aggregations: @@ -884,16 +854,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -915,7 +879,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Reduce Operator Tree: Join Operator condition map: @@ -1076,16 +1040,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ 
-1107,7 +1065,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Reduce Operator Tree: Group By Operator aggregations: @@ -1216,16 +1174,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1247,7 +1199,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/join9.q.out =================================================================== --- ql/src/test/results/clientpositive/join9.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/join9.q.out (working copy) @@ -73,16 +73,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -104,7 +98,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: 
default.src + name: null.null #### A masked pattern was here #### Partition base file name: hr=12 @@ -120,7 +114,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 Index: ql/src/test/results/clientpositive/join_filters_overlap.q.out =================================================================== --- ql/src/test/results/clientpositive/join_filters_overlap.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/join_filters_overlap.q.out (working copy) @@ -104,16 +104,10 @@ columns key,value columns.types int:int #### A masked pattern was here #### - name default.a - numFiles 1 - numPartitions 0 - numRows 3 - rawDataSize 18 - serialization.ddl struct a { i32 key, i32 value} + name null.null + serialization.ddl struct null { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 21 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -135,7 +129,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.a - name: default.a + name: null.null Reduce Operator Tree: Join Operator condition map: @@ -310,16 +304,10 @@ columns key,value columns.types int:int #### A masked pattern was here #### - name default.a - numFiles 1 - numPartitions 0 - numRows 3 - rawDataSize 18 - serialization.ddl struct a { i32 key, i32 value} + name null.null + serialization.ddl struct null { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 21 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -341,7 +329,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: 
default.a - name: default.a + name: null.null Reduce Operator Tree: Join Operator condition map: @@ -516,16 +504,10 @@ columns key,value columns.types int:int #### A masked pattern was here #### - name default.a - numFiles 1 - numPartitions 0 - numRows 3 - rawDataSize 18 - serialization.ddl struct a { i32 key, i32 value} + name null.null + serialization.ddl struct null { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 21 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -547,7 +529,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.a - name: default.a + name: null.null Reduce Operator Tree: Join Operator condition map: @@ -740,16 +722,10 @@ columns key,value columns.types int:int #### A masked pattern was here #### - name default.a - numFiles 1 - numPartitions 0 - numRows 3 - rawDataSize 18 - serialization.ddl struct a { i32 key, i32 value} + name null.null + serialization.ddl struct null { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 21 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -771,7 +747,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.a - name: default.a + name: null.null Reduce Operator Tree: Join Operator condition map: @@ -978,16 +954,10 @@ columns key,value columns.types int:int #### A masked pattern was here #### - name default.a - numFiles 1 - numPartitions 0 - numRows 3 - rawDataSize 18 - serialization.ddl struct a { i32 key, i32 value} + name null.null + serialization.ddl struct null { i32 key, i32 value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 21 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1009,7 +979,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.a - name: default.a + name: null.null Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/rand_partitionpruner3.q.out =================================================================== --- ql/src/test/results/clientpositive/rand_partitionpruner3.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/rand_partitionpruner3.q.out (working copy) @@ -73,7 +73,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -199,7 +198,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 Index: ql/src/test/results/clientpositive/bucketcontext_4.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketcontext_4.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucketcontext_4.q.out (working copy) @@ -162,7 +162,6 @@ partition values: ds 2008-04-08 properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,value @@ -170,7 +169,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 1 numRows 0 partition_columns ds rawDataSize 0 @@ -353,7 +351,6 @@ partition values: ds 2008-04-08 properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,value @@ -361,7 +358,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 1 numRows 0 partition_columns ds rawDataSize 0 Index: ql/src/test/results/clientpositive/bucket4.q.out 
=================================================================== --- ql/src/test/results/clientpositive/bucket4.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucket4.q.out (working copy) @@ -62,16 +62,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -93,7 +87,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Reduce Operator Tree: Extract Select Operator Index: ql/src/test/results/clientpositive/udtf_explode.q.out =================================================================== --- ql/src/test/results/clientpositive/udtf_explode.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/udtf_explode.q.out (working copy) @@ -66,16 +66,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -97,7 +91,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: 
default.src + name: null.null Truncated Path -> Alias: /src [src] @@ -154,16 +148,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -185,7 +173,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Reduce Operator Tree: Extract Limit @@ -420,16 +408,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -451,7 +433,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Reduce Operator Tree: Extract Limit @@ -576,7 +558,6 @@ Fetch Operator limit: -1 - PREHOOK: query: SELECT explode(map(1,'one',2,'two',3,'three')) as (myKey,myVal) FROM src LIMIT 3 PREHOOK: type: QUERY PREHOOK: Input: default@src Index: ql/src/test/results/clientpositive/merge3.q.out =================================================================== --- ql/src/test/results/clientpositive/merge3.q.out 
(revision 1426896) +++ ql/src/test/results/clientpositive/merge3.q.out (working copy) @@ -110,16 +110,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.merge_src - numFiles 4 - numPartitions 0 - numRows 2000 - rawDataSize 21248 - serialization.ddl struct merge_src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -141,7 +135,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.merge_src - name: default.merge_src + name: null.null Truncated Path -> Alias: /merge_src [merge_src] @@ -2437,7 +2431,6 @@ #### A masked pattern was here #### name default.merge_src_part numFiles 2 - numPartitions 2 numRows 1000 partition_columns ds rawDataSize 10624 @@ -2483,7 +2476,6 @@ #### A masked pattern was here #### name default.merge_src_part numFiles 2 - numPartitions 2 numRows 1000 partition_columns ds rawDataSize 10624 @@ -4865,7 +4857,6 @@ #### A masked pattern was here #### name default.merge_src_part numFiles 2 - numPartitions 2 numRows 1000 partition_columns ds rawDataSize 10624 @@ -4911,7 +4902,6 @@ #### A masked pattern was here #### name default.merge_src_part numFiles 2 - numPartitions 2 numRows 1000 partition_columns ds rawDataSize 10624 Index: ql/src/test/results/clientpositive/binary_output_format.q.out =================================================================== --- ql/src/test/results/clientpositive/binary_output_format.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/binary_output_format.q.out (working copy) @@ -127,16 +127,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name 
default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -158,7 +152,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Truncated Path -> Alias: /src [src] Index: ql/src/test/results/clientpositive/bucketmapjoin9.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin9.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucketmapjoin9.q.out (working copy) @@ -151,7 +151,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 numFiles 2 - numPartitions 1 numRows 0 partition_columns part rawDataSize 0 @@ -413,7 +412,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 numFiles 2 - numPartitions 1 numRows 0 partition_columns part rawDataSize 0 Index: ql/src/test/results/clientpositive/bucketmapjoin13.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin13.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucketmapjoin13.q.out (working copy) @@ -179,7 +179,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 numFiles 2 - numPartitions 2 numRows 500 partition_columns part rawDataSize 5312 @@ -227,7 +226,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 numFiles 2 - numPartitions 2 numRows 500 partition_columns part rawDataSize 5312 @@ -474,7 +472,6 @@ #### A masked pattern was here #### name 
default.srcbucket_mapjoin_part_1 numFiles 2 - numPartitions 2 numRows 500 partition_columns part rawDataSize 5312 @@ -732,7 +729,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 numFiles 2 - numPartitions 2 numRows 500 partition_columns part rawDataSize 5312 @@ -992,7 +988,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 numFiles 2 - numPartitions 2 numRows 500 partition_columns part rawDataSize 5312 Index: ql/src/test/results/clientpositive/bucketmapjoin4.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin4.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucketmapjoin4.q.out (working copy) @@ -211,16 +211,10 @@ columns key,value columns.types int:string #### A masked pattern was here #### - name default.srcbucket_mapjoin - numFiles 2 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin { i32 key, string value} + name null.null + serialization.ddl struct null { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -243,7 +237,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin - name: default.srcbucket_mapjoin + name: null.null Truncated Path -> Alias: /srcbucket_mapjoin [a] @@ -706,16 +700,10 @@ columns key,value columns.types int:string #### A masked pattern was here #### - name default.srcbucket_mapjoin - numFiles 2 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin { i32 key, string value} + name null.null + serialization.ddl struct null { i32 key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -738,7 +726,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin - name: default.srcbucket_mapjoin + name: null.null Truncated Path -> Alias: /srcbucket_mapjoin [b] Index: ql/src/test/results/clientpositive/smb_mapjoin_13.q.out =================================================================== --- ql/src/test/results/clientpositive/smb_mapjoin_13.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/smb_mapjoin_13.q.out (working copy) @@ -125,22 +125,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name key columns key,value columns.types int:string #### A masked pattern was here #### - name default.test_table1 - numFiles 16 - numPartitions 0 - numRows 500 - rawDataSize 5312 - serialization.ddl struct test_table1 { i32 key, string value} + name null.null + serialization.ddl struct null { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -164,7 +157,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table1 - name: default.test_table1 + name: null.null Truncated Path -> Alias: /test_table1 [a] @@ -380,22 +373,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name 
key columns key,value columns.types int:string #### A masked pattern was here #### - name default.test_table3 - numFiles 16 - numPartitions 0 - numRows 500 - rawDataSize 5312 - serialization.ddl struct test_table3 { i32 key, string value} + name null.null + serialization.ddl struct null { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -419,7 +405,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table3 - name: default.test_table3 + name: null.null Truncated Path -> Alias: /test_table3 [a] Index: ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out (working copy) @@ -129,22 +129,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t1 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -168,7 +161,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: 
default.t1 - name: default.t1 + name: null.null Truncated Path -> Alias: /t1 [t1] @@ -459,22 +452,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t1 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -498,7 +484,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 - name: default.t1 + name: null.null Reduce Operator Tree: Group By Operator aggregations: @@ -801,22 +787,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t1 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -840,7 +819,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 - name: 
default.t1 + name: null.null Truncated Path -> Alias: /t1 [subq1:t1] @@ -1206,22 +1185,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t1 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1245,7 +1217,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 - name: default.t1 + name: null.null Truncated Path -> Alias: /t1 [subq1:t1] @@ -1631,22 +1603,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t1 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1670,7 +1635,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 - name: default.t1 + name: 
null.null Truncated Path -> Alias: /t1 [t1] @@ -2005,22 +1970,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t1 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -2044,7 +2002,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 - name: default.t1 + name: null.null Reduce Operator Tree: Group By Operator aggregations: @@ -2356,22 +2314,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t1 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -2395,7 +2346,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 - name: default.t1 + name: null.null Reduce 
Operator Tree: Group By Operator aggregations: @@ -2742,22 +2693,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t1 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -2781,7 +2725,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 - name: default.t1 + name: null.null Reduce Operator Tree: Group By Operator aggregations: @@ -3213,22 +3157,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t1 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -3252,7 +3189,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 - name: default.t1 + name: null.null 
Truncated Path -> Alias: /t1 [null-subquery1:subq1-subquery1:t1, null-subquery2:subq1-subquery2:t1] @@ -3653,22 +3590,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t1 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -3692,7 +3622,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 - name: default.t1 + name: null.null Reduce Operator Tree: Group By Operator aggregations: @@ -3944,22 +3874,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t1 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -3983,7 +3906,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: 
default.t1 - name: default.t1 + name: null.null Truncated Path -> Alias: /t1 [null-subquery1:subq1-subquery1:t1] #### A masked pattern was here #### @@ -4430,22 +4353,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t1 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -4469,7 +4385,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 - name: default.t1 + name: null.null Reduce Operator Tree: Join Operator condition map: @@ -4750,22 +4666,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t1 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -4789,7 +4698,7 @@ #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 - name: default.t1 + name: null.null Reduce Operator Tree: Group By Operator aggregations: @@ -4984,22 +4893,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t1 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -5023,7 +4925,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 - name: default.t1 + name: null.null Reduce Operator Tree: Join Operator condition map: @@ -5243,22 +5145,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t2 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t2 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -5282,7 +5177,7 @@ #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t2 - name: default.t2 + name: null.null Reduce Operator Tree: Group By Operator aggregations: @@ -5671,22 +5566,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t2 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t2 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -5710,7 +5598,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t2 - name: default.t2 + name: null.null Truncated Path -> Alias: /t2 [t2] @@ -6215,22 +6103,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t2 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t2 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -6254,7 +6135,7 @@ #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t2 - name: default.t2 + name: null.null Truncated Path -> Alias: /t2 [t2] @@ -6711,22 +6592,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t2 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t2 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -6750,7 +6624,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t2 - name: default.t2 + name: null.null Truncated Path -> Alias: /t2 [subq:t2] @@ -7269,22 +7143,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,val columns.types string:string #### A masked pattern was here #### - name default.t2 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 24 - serialization.ddl struct t2 { string key, string val} + name null.null + serialization.ddl struct null { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -7308,7 +7175,7 @@ #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t2 - name: default.t2 + name: null.null Truncated Path -> Alias: /t2 [subq2:subq:t2] Index: ql/src/test/results/clientpositive/columnstats_partlvl.q.out =================================================================== --- ql/src/test/results/clientpositive/columnstats_partlvl.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/columnstats_partlvl.q.out (working copy) @@ -145,7 +145,6 @@ #### A masked pattern was here #### name default.employee_part numFiles 1 - numPartitions 2 numRows 0 partition_columns employeesalary rawDataSize 0 @@ -350,7 +349,6 @@ #### A masked pattern was here #### name default.employee_part numFiles 1 - numPartitions 2 numRows 0 partition_columns employeesalary rawDataSize 0 Index: ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out (working copy) @@ -219,22 +219,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 3 bucket_field_name key columns key,value columns.types string:string #### A masked pattern was here #### - name default.test1 - numFiles 3 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct test1 { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -258,7 +251,7 @@ #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test1 - name: default.test1 + name: null.null Truncated Path -> Alias: /test1 [l] @@ -378,22 +371,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 3 bucket_field_name value columns key,value columns.types string:string #### A masked pattern was here #### - name default.test2 - numFiles 3 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct test2 { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -417,7 +403,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2 - name: default.test2 + name: null.null Truncated Path -> Alias: /test2 [l] @@ -532,22 +518,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 3 bucket_field_name key columns key,value columns.types string:string #### A masked pattern was here #### - name default.test1 - numFiles 3 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct test1 { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -571,7 +550,7 @@ #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test1 - name: default.test1 + name: null.null Truncated Path -> Alias: /test1 [l] @@ -684,22 +663,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 3 bucket_field_name key columns key,value columns.types string:string #### A masked pattern was here #### - name default.test1 - numFiles 3 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct test1 { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -723,7 +695,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test1 - name: default.test1 + name: null.null Truncated Path -> Alias: /test1 [l] @@ -836,22 +808,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 3 bucket_field_name key columns key,value columns.types string:string #### A masked pattern was here #### - name default.test1 - numFiles 3 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct test1 { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -875,7 +840,7 @@ #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test1 - name: default.test1 + name: null.null Truncated Path -> Alias: /test1 [l] @@ -988,22 +953,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 3 bucket_field_name key columns key,value columns.types string:string #### A masked pattern was here #### - name default.test1 - numFiles 3 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct test1 { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1027,7 +985,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test1 - name: default.test1 + name: null.null Truncated Path -> Alias: /test1 [l] @@ -1140,22 +1098,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 3 bucket_field_name value columns key,value columns.types string:string #### A masked pattern was here #### - name default.test2 - numFiles 3 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct test2 { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1179,7 +1130,7 @@ #### A masked pattern was here #### 
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2 - name: default.test2 + name: null.null Truncated Path -> Alias: /test2 [l] @@ -1292,22 +1243,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 3 bucket_field_name value columns key,value columns.types string:string #### A masked pattern was here #### - name default.test2 - numFiles 3 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct test2 { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1331,7 +1275,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2 - name: default.test2 + name: null.null Truncated Path -> Alias: /test2 [l] @@ -1444,22 +1388,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 3 bucket_field_name key columns key,value columns.types string:string #### A masked pattern was here #### - name default.test3 - numFiles 3 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct test3 { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1483,7 +1420,7 @@ #### A masked pattern was here 
#### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test3 - name: default.test3 + name: null.null Truncated Path -> Alias: /test3 [l] Index: ql/src/test/results/clientpositive/sample8.q.out =================================================================== --- ql/src/test/results/clientpositive/sample8.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/sample8.q.out (working copy) @@ -85,7 +85,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -132,7 +131,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -179,7 +177,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -226,7 +223,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 Index: ql/src/test/results/clientpositive/router_join_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/router_join_ppr.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/router_join_ppr.q.out (working copy) @@ -89,16 +89,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -120,7 +114,7 @@ #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null #### A masked pattern was here #### Partition base file name: hr=11 @@ -136,7 +130,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -183,7 +176,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -230,7 +222,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -277,7 +268,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -497,16 +487,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -528,7 +512,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null #### A masked pattern was here #### Partition base file name: hr=11 @@ -544,7 +528,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -591,7 +574,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -802,16 +784,10 @@ columns key,value columns.types string:string #### A masked 
pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -833,7 +809,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null #### A masked pattern was here #### Partition base file name: hr=11 @@ -849,7 +825,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -896,7 +871,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -1107,16 +1081,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1138,7 +1106,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null #### A masked pattern was here #### Partition base file name: hr=11 @@ -1154,7 +1122,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns 
ds/hr rawDataSize 0 @@ -1201,7 +1168,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -1248,7 +1214,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -1295,7 +1260,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 Index: ql/src/test/results/clientpositive/input42.q.out =================================================================== --- ql/src/test/results/clientpositive/input42.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/input42.q.out (working copy) @@ -66,7 +66,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -113,7 +112,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -1256,7 +1254,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -1303,7 +1300,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -1824,7 +1820,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -1871,7 +1866,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 Index: ql/src/test/results/clientpositive/union24.q.out =================================================================== --- ql/src/test/results/clientpositive/union24.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/union24.q.out (working copy) @@ -111,16 +111,10 @@ columns key,count columns.types 
string:bigint #### A masked pattern was here #### - name default.src5 - numFiles 1 - numPartitions 0 - numRows 309 - rawDataSize 1482 - serialization.ddl struct src5 { string key, i64 count} + name null.null + serialization.ddl struct null { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -142,7 +136,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src5 - name: default.src5 + name: null.null Reduce Operator Tree: Group By Operator aggregations: @@ -346,16 +340,10 @@ columns key,count columns.types string:bigint #### A masked pattern was here #### - name default.src2 - numFiles 1 - numPartitions 0 - numRows 309 - rawDataSize 1482 - serialization.ddl struct src2 { string key, i64 count} + name null.null + serialization.ddl struct null { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -377,7 +365,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src2 - name: default.src2 + name: null.null #### A masked pattern was here #### Partition base file name: src3 @@ -388,16 +376,10 @@ columns key,count columns.types string:bigint #### A masked pattern was here #### - name default.src3 - numFiles 1 - numPartitions 0 - numRows 309 - rawDataSize 1482 - serialization.ddl struct src3 { string key, i64 count} + name null.null + serialization.ddl struct null { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 1791 
-#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -419,7 +401,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src3 - name: default.src3 + name: null.null #### A masked pattern was here #### Partition base file name: src4 @@ -430,16 +412,10 @@ columns key,count columns.types string:bigint #### A masked pattern was here #### - name default.src4 - numFiles 1 - numPartitions 0 - numRows 309 - rawDataSize 1482 - serialization.ddl struct src4 { string key, i64 count} + name null.null + serialization.ddl struct null { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -461,7 +437,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src4 - name: default.src4 + name: null.null Reduce Operator Tree: Extract File Output Operator @@ -636,16 +612,10 @@ columns key,count columns.types string:bigint #### A masked pattern was here #### - name default.src4 - numFiles 1 - numPartitions 0 - numRows 309 - rawDataSize 1482 - serialization.ddl struct src4 { string key, i64 count} + name null.null + serialization.ddl struct null { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -667,7 +637,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src4 - name: default.src4 + name: null.null #### A masked pattern was here #### Partition base file 
name: src5 @@ -678,16 +648,10 @@ columns key,count columns.types string:bigint #### A masked pattern was here #### - name default.src5 - numFiles 1 - numPartitions 0 - numRows 309 - rawDataSize 1482 - serialization.ddl struct src5 { string key, i64 count} + name null.null + serialization.ddl struct null { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -709,7 +673,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src5 - name: default.src5 + name: null.null Reduce Operator Tree: Join Operator condition map: @@ -876,16 +840,10 @@ columns key,count columns.types string:bigint #### A masked pattern was here #### - name default.src2 - numFiles 1 - numPartitions 0 - numRows 309 - rawDataSize 1482 - serialization.ddl struct src2 { string key, i64 count} + name null.null + serialization.ddl struct null { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -907,7 +865,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src2 - name: default.src2 + name: null.null #### A masked pattern was here #### Partition base file name: src3 @@ -918,16 +876,10 @@ columns key,count columns.types string:bigint #### A masked pattern was here #### - name default.src3 - numFiles 1 - numPartitions 0 - numRows 309 - rawDataSize 1482 - serialization.ddl struct src3 { string key, i64 count} + name null.null + serialization.ddl struct null { string key, i64 count} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -949,7 +901,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src3 - name: default.src3 + name: null.null Reduce Operator Tree: Extract File Output Operator @@ -1111,16 +1063,10 @@ columns key,count columns.types string:bigint #### A masked pattern was here #### - name default.src4 - numFiles 1 - numPartitions 0 - numRows 309 - rawDataSize 1482 - serialization.ddl struct src4 { string key, i64 count} + name null.null + serialization.ddl struct null { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1142,7 +1088,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src4 - name: default.src4 + name: null.null #### A masked pattern was here #### Partition base file name: src5 @@ -1153,16 +1099,10 @@ columns key,count columns.types string:bigint #### A masked pattern was here #### - name default.src5 - numFiles 1 - numPartitions 0 - numRows 309 - rawDataSize 1482 - serialization.ddl struct src5 { string key, i64 count} + name null.null + serialization.ddl struct null { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1184,7 +1124,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src5 - name: default.src5 + 
name: null.null Reduce Operator Tree: Join Operator condition map: @@ -1429,16 +1369,10 @@ columns key,count columns.types string:bigint #### A masked pattern was here #### - name default.src2 - numFiles 1 - numPartitions 0 - numRows 309 - rawDataSize 1482 - serialization.ddl struct src2 { string key, i64 count} + name null.null + serialization.ddl struct null { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1460,7 +1394,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src2 - name: default.src2 + name: null.null #### A masked pattern was here #### Partition base file name: src3 @@ -1471,16 +1405,10 @@ columns key,count columns.types string:bigint #### A masked pattern was here #### - name default.src3 - numFiles 1 - numPartitions 0 - numRows 309 - rawDataSize 1482 - serialization.ddl struct src3 { string key, i64 count} + name null.null + serialization.ddl struct null { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1502,7 +1430,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src3 - name: default.src3 + name: null.null Reduce Operator Tree: Extract File Output Operator Index: ql/src/test/results/clientpositive/join34.q.out =================================================================== --- ql/src/test/results/clientpositive/join34.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/join34.q.out (working copy) @@ -220,16 +220,10 @@ columns key,value 
columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -251,7 +245,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Truncated Path -> Alias: /src [null-subquery1:subq1-subquery1:x, null-subquery2:subq1-subquery2:x1] Index: ql/src/test/results/clientpositive/louter_join_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/louter_join_ppr.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/louter_join_ppr.q.out (working copy) @@ -87,16 +87,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -118,7 +112,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null #### A masked pattern was here #### Partition base file name: hr=11 @@ -134,7 +128,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - 
numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -181,7 +174,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -392,16 +384,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -423,7 +409,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null #### A masked pattern was here #### Partition base file name: hr=11 @@ -439,7 +425,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -486,7 +471,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -533,7 +517,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -580,7 +563,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -802,16 +784,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -833,7 +809,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null #### A masked pattern was here #### Partition base file name: hr=11 @@ -849,7 +825,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -896,7 +871,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -943,7 +917,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -990,7 +963,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -1207,16 +1179,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1238,7 +1204,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null #### A masked pattern was here #### Partition base file name: hr=11 @@ -1254,7 +1220,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 
- numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -1301,7 +1266,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 Index: ql/src/test/results/clientpositive/bucketcontext_8.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketcontext_8.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucketcontext_8.q.out (working copy) @@ -175,7 +175,6 @@ partition values: ds 2008-04-08 properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key columns key,value @@ -183,7 +182,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 4 - numPartitions 2 numRows 0 partition_columns ds rawDataSize 0 @@ -225,7 +223,6 @@ partition values: ds 2008-04-09 properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key columns key,value @@ -233,7 +230,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 4 - numPartitions 2 numRows 0 partition_columns ds rawDataSize 0 @@ -419,7 +415,6 @@ partition values: ds 2008-04-08 properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key columns key,value @@ -427,7 +422,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 4 - numPartitions 2 numRows 0 partition_columns ds rawDataSize 0 @@ -469,7 +463,6 @@ partition values: ds 2008-04-09 properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key columns key,value @@ -477,7 +470,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 4 - numPartitions 2 numRows 0 partition_columns ds rawDataSize 0 Index: ql/src/test/results/clientpositive/rand_partitionpruner2.q.out =================================================================== --- ql/src/test/results/clientpositive/rand_partitionpruner2.q.out (revision 1426896) +++ 
ql/src/test/results/clientpositive/rand_partitionpruner2.q.out (working copy) @@ -95,7 +95,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -142,7 +141,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 Index: ql/src/test/results/clientpositive/bucketcontext_3.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketcontext_3.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucketcontext_3.q.out (working copy) @@ -150,7 +150,6 @@ partition values: ds 2008-04-08 properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key columns key,value @@ -158,7 +157,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 4 - numPartitions 1 numRows 0 partition_columns ds rawDataSize 0 @@ -341,7 +339,6 @@ partition values: ds 2008-04-08 properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key columns key,value @@ -349,7 +346,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 4 - numPartitions 1 numRows 0 partition_columns ds rawDataSize 0 Index: ql/src/test/results/clientpositive/bucket3.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket3.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucket3.q.out (working copy) @@ -59,16 +59,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### 
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -90,7 +84,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Reduce Operator Tree: Extract Select Operator Index: ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out (working copy) @@ -72,7 +72,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -119,7 +118,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 Index: ql/src/test/results/clientpositive/bucketmapjoin8.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin8.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucketmapjoin8.q.out (working copy) @@ -152,7 +152,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 numFiles 2 - numPartitions 1 numRows 0 partition_columns part rawDataSize 0 @@ -392,7 +391,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 numFiles 2 - numPartitions 1 numRows 0 partition_columns part rawDataSize 0 Index: ql/src/test/results/clientpositive/bucketmapjoin12.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin12.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucketmapjoin12.q.out (working copy) @@ -180,7 +180,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 numFiles 2 - numPartitions 1 numRows 0 
partition_columns part rawDataSize 0 @@ -405,7 +404,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 numFiles 2 - numPartitions 1 numRows 0 partition_columns part rawDataSize 0 Index: ql/src/test/results/clientpositive/bucketmapjoin3.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin3.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucketmapjoin3.q.out (working copy) @@ -215,7 +215,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 numFiles 2 - numPartitions 1 numRows 0 partition_columns ds rawDataSize 0 @@ -718,7 +717,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part numFiles 4 - numPartitions 1 numRows 0 partition_columns ds rawDataSize 0 Index: ql/src/test/results/clientpositive/smb_mapjoin_12.q.out =================================================================== --- ql/src/test/results/clientpositive/smb_mapjoin_12.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/smb_mapjoin_12.q.out (working copy) @@ -157,7 +157,6 @@ partition values: ds 1 properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name key columns key,value @@ -165,7 +164,6 @@ #### A masked pattern was here #### name default.test_table1 numFiles 16 - numPartitions 1 numRows 500 partition_columns ds rawDataSize 5312 @@ -390,7 +388,6 @@ partition values: ds 1 properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name key columns key,value @@ -398,7 +395,6 @@ #### A masked pattern was here #### name default.test_table3 numFiles 16 - numPartitions 1 numRows 3084 partition_columns ds rawDataSize 32904 Index: ql/src/test/results/clientpositive/outer_join_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/outer_join_ppr.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/outer_join_ppr.q.out (working copy) 
@@ -79,16 +79,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -110,7 +104,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null #### A masked pattern was here #### Partition base file name: hr=11 @@ -126,7 +120,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -173,7 +166,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -220,7 +212,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -267,7 +258,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -479,16 +469,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: 
org.apache.hadoop.mapred.TextInputFormat @@ -510,7 +494,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null #### A masked pattern was here #### Partition base file name: hr=11 @@ -526,7 +510,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -573,7 +556,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -620,7 +602,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -667,7 +648,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 Index: ql/src/test/results/clientpositive/udf_reflect.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_reflect.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/udf_reflect.q.out (working copy) @@ -99,16 +99,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -130,7 +124,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Truncated Path -> Alias: /src [src] Index: 
ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out (working copy) @@ -184,16 +184,10 @@ columns key,value columns.types int:string #### A masked pattern was here #### - name default.srcbucket_mapjoin - numFiles 2 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin { i32 key, string value} + name null.null + serialization.ddl struct null { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -216,7 +210,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin - name: default.srcbucket_mapjoin + name: null.null Truncated Path -> Alias: /srcbucket_mapjoin [a] Index: ql/src/test/results/clientpositive/sample7.q.out =================================================================== --- ql/src/test/results/clientpositive/sample7.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/sample7.q.out (working copy) @@ -92,16 +92,10 @@ columns key,value columns.types int:string #### A masked pattern was here #### - name default.srcbucket - numFiles 2 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket { i32 key, string value} + name null.null + serialization.ddl struct null { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11603 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -124,7 
+118,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket - name: default.srcbucket + name: null.null Truncated Path -> Alias: /srcbucket/srcbucket0.txt [s] Index: ql/src/test/results/clientpositive/bucket_map_join_2.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket_map_join_2.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucket_map_join_2.q.out (working copy) @@ -122,22 +122,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 1 bucket_field_name key columns key,value columns.types string:string #### A masked pattern was here #### - name default.table1 - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct table1 { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 20 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -161,7 +154,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.table1 - name: default.table1 + name: null.null Truncated Path -> Alias: /table1 [a] Index: ql/src/test/results/clientpositive/sample2.q.out =================================================================== --- ql/src/test/results/clientpositive/sample2.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/sample2.q.out (working copy) @@ -87,16 +87,10 @@ columns key,value columns.types int:string #### A masked pattern was here #### - name default.srcbucket - numFiles 2 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct 
srcbucket { i32 key, string value} + name null.null + serialization.ddl struct null { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11603 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -119,7 +113,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket - name: default.srcbucket + name: null.null Truncated Path -> Alias: /srcbucket/srcbucket0.txt [s] Index: ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out =================================================================== --- ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out (working copy) @@ -59,16 +59,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -90,7 +84,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Reduce Operator Tree: Extract Select Operator Index: ql/src/test/results/clientpositive/partition_wise_fileformat9.q.out =================================================================== --- ql/src/test/results/clientpositive/partition_wise_fileformat9.q.out (revision 0) +++ 
ql/src/test/results/clientpositive/partition_wise_fileformat9.q.out (working copy) @@ -0,0 +1,109 @@ +PREHOOK: query: -- This tests that a query can span multiple partitions which can not only have different file formats, but +-- also different serdes +create table partition_test_partitioned(key string, value string) partitioned by (dt string) stored as rcfile +PREHOOK: type: CREATETABLE +POSTHOOK: query: -- This tests that a query can span multiple partitions which can not only have different file formats, but +-- also different serdes +create table partition_test_partitioned(key string, value string) partitioned by (dt string) stored as rcfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@partition_test_partitioned +PREHOOK: query: insert overwrite table partition_test_partitioned partition(dt='1') select * from src +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@partition_test_partitioned@dt=1 +POSTHOOK: query: insert overwrite table partition_test_partitioned partition(dt='1') select * from src +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@partition_test_partitioned@dt=1 +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: alter table partition_test_partitioned set serde 'org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe' +PREHOOK: type: ALTERTABLE_SERIALIZER +PREHOOK: Input: default@partition_test_partitioned +PREHOOK: Output: default@partition_test_partitioned +POSTHOOK: query: alter table partition_test_partitioned set serde 'org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe' +POSTHOOK: type: ALTERTABLE_SERIALIZER +POSTHOOK: Input: default@partition_test_partitioned +POSTHOOK: Output: default@partition_test_partitioned 
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: insert overwrite table partition_test_partitioned partition(dt='2') select * from src +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@partition_test_partitioned@dt=2 +POSTHOOK: query: insert overwrite table partition_test_partitioned partition(dt='2') select * from src +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@partition_test_partitioned@dt=2 +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: select * from partition_test_partitioned where dt is not null order by key, value, dt limit 20 +PREHOOK: type: QUERY +PREHOOK: Input: default@partition_test_partitioned@dt=1 +PREHOOK: Input: default@partition_test_partitioned@dt=2 +#### A masked pattern was here #### +POSTHOOK: query: select * from partition_test_partitioned where dt is not null order by key, value, dt limit 20 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partition_test_partitioned@dt=1 +POSTHOOK: Input: default@partition_test_partitioned@dt=2 +#### A masked pattern was here #### +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: 
Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +0 val_0 1 +0 val_0 1 +0 val_0 1 +0 val_0 2 +0 val_0 2 +0 val_0 2 +10 val_10 1 +10 val_10 2 +100 val_100 1 +100 val_100 1 +100 val_100 2 +100 val_100 2 +103 val_103 1 +103 val_103 1 +103 val_103 2 +103 val_103 2 +104 val_104 1 +104 val_104 1 +104 val_104 2 +104 val_104 2 +PREHOOK: query: select key+key as key, value, dt from partition_test_partitioned where dt is not null order by key, value, dt limit 20 +PREHOOK: type: QUERY +PREHOOK: Input: default@partition_test_partitioned@dt=1 +PREHOOK: Input: default@partition_test_partitioned@dt=2 +#### A masked pattern was here #### +POSTHOOK: query: select key+key as key, value, dt from partition_test_partitioned where dt is not null order by key, value, dt limit 20 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partition_test_partitioned@dt=1 +POSTHOOK: Input: default@partition_test_partitioned@dt=2 +#### A masked pattern was here #### +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +0.0 val_0 1 +0.0 val_0 1 +0.0 val_0 1 +0.0 val_0 2 +0.0 val_0 2 +0.0 val_0 2 +4.0 val_2 1 +4.0 val_2 2 +8.0 val_4 1 +8.0 val_4 
2 +10.0 val_5 1 +10.0 val_5 1 +10.0 val_5 1 +10.0 val_5 2 +10.0 val_5 2 +10.0 val_5 2 +16.0 val_8 1 +16.0 val_8 2 +18.0 val_9 1 +18.0 val_9 2 Index: ql/src/test/results/clientpositive/stats11.q.out =================================================================== --- ql/src/test/results/clientpositive/stats11.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/stats11.q.out (working copy) @@ -400,16 +400,10 @@ columns key,value columns.types int:string #### A masked pattern was here #### - name default.srcbucket_mapjoin - numFiles 2 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin { i32 key, string value} + name null.null + serialization.ddl struct null { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -432,7 +426,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin - name: default.srcbucket_mapjoin + name: null.null Truncated Path -> Alias: /srcbucket_mapjoin [a] @@ -905,7 +899,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part numFiles 4 - numPartitions 1 numRows 0 partition_columns ds rawDataSize 0 @@ -1149,7 +1142,6 @@ hdfs directory: true #### A masked pattern was here #### - PREHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(a)*/ a.key, a.value, b.value from srcbucket_mapjoin a join srcbucket_mapjoin_part b Index: ql/src/test/results/clientpositive/input23.q.out =================================================================== --- ql/src/test/results/clientpositive/input23.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/input23.q.out (working copy) @@ -71,7 +71,6 @@ #### A masked pattern was here #### name default.srcpart 
numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 Index: ql/src/test/results/clientpositive/groupby_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_ppr.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/groupby_ppr.q.out (working copy) @@ -70,7 +70,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -117,7 +116,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 Index: ql/src/test/results/clientpositive/input_part7.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part7.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/input_part7.q.out (working copy) @@ -150,7 +150,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -197,7 +196,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 Index: ql/src/test/results/clientpositive/join33.q.out =================================================================== --- ql/src/test/results/clientpositive/join33.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/join33.q.out (working copy) @@ -97,16 +97,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -128,7 +122,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Truncated Path -> Alias: /src [y] @@ -210,7 +204,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 Index: ql/src/test/results/clientpositive/input_part2.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part2.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/input_part2.q.out (working copy) @@ -167,7 +167,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -214,7 +213,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 Index: ql/src/test/results/clientpositive/partition_wise_fileformat12.q.out =================================================================== --- ql/src/test/results/clientpositive/partition_wise_fileformat12.q.out (revision 0) +++ ql/src/test/results/clientpositive/partition_wise_fileformat12.q.out (working copy) @@ -0,0 +1,208 @@ +PREHOOK: query: -- This tests that the schema can be changed for binary serde data +create table partition_test_partitioned(key string, value string) partitioned by (dt string) stored as rcfile +PREHOOK: type: CREATETABLE +POSTHOOK: query: -- This tests that the schema can be changed for binary serde data +create table partition_test_partitioned(key string, value string) partitioned by (dt string) stored as rcfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@partition_test_partitioned +PREHOOK: query: alter table partition_test_partitioned set serde 
'org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe' +PREHOOK: type: ALTERTABLE_SERIALIZER +PREHOOK: Input: default@partition_test_partitioned +PREHOOK: Output: default@partition_test_partitioned +POSTHOOK: query: alter table partition_test_partitioned set serde 'org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe' +POSTHOOK: type: ALTERTABLE_SERIALIZER +POSTHOOK: Input: default@partition_test_partitioned +POSTHOOK: Output: default@partition_test_partitioned +PREHOOK: query: insert overwrite table partition_test_partitioned partition(dt='1') select * from src where key = 238 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@partition_test_partitioned@dt=1 +POSTHOOK: query: insert overwrite table partition_test_partitioned partition(dt='1') select * from src where key = 238 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@partition_test_partitioned@dt=1 +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: select * from partition_test_partitioned where dt is not null +PREHOOK: type: QUERY +PREHOOK: Input: default@partition_test_partitioned@dt=1 +#### A masked pattern was here #### +POSTHOOK: query: select * from partition_test_partitioned where dt is not null +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partition_test_partitioned@dt=1 +#### A masked pattern was here #### +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +238 val_238 1 +238 val_238 1 +PREHOOK: query: select key+key, value from 
partition_test_partitioned where dt is not null +PREHOOK: type: QUERY +PREHOOK: Input: default@partition_test_partitioned@dt=1 +#### A masked pattern was here #### +POSTHOOK: query: select key+key, value from partition_test_partitioned where dt is not null +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partition_test_partitioned@dt=1 +#### A masked pattern was here #### +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +476.0 val_238 +476.0 val_238 +PREHOOK: query: alter table partition_test_partitioned change key key int +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: default@partition_test_partitioned +PREHOOK: Output: default@partition_test_partitioned +POSTHOOK: query: alter table partition_test_partitioned change key key int +POSTHOOK: type: ALTERTABLE_RENAMECOL +POSTHOOK: Input: default@partition_test_partitioned +POSTHOOK: Output: default@partition_test_partitioned +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: select key+key, value from partition_test_partitioned where dt is not null +PREHOOK: type: QUERY +PREHOOK: Input: default@partition_test_partitioned@dt=1 +#### A masked pattern was here #### +POSTHOOK: query: select key+key, value from partition_test_partitioned where dt is not null +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partition_test_partitioned@dt=1 +#### A masked pattern was here #### +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: 
partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +476 val_238 +476 val_238 +PREHOOK: query: select * from partition_test_partitioned where dt is not null +PREHOOK: type: QUERY +PREHOOK: Input: default@partition_test_partitioned@dt=1 +#### A masked pattern was here #### +POSTHOOK: query: select * from partition_test_partitioned where dt is not null +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partition_test_partitioned@dt=1 +#### A masked pattern was here #### +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +238 val_238 1 +238 val_238 1 +PREHOOK: query: insert overwrite table partition_test_partitioned partition(dt='2') select * from src where key = 97 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@partition_test_partitioned@dt=2 +POSTHOOK: query: insert overwrite table partition_test_partitioned partition(dt='2') select * from src where key = 97 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@partition_test_partitioned@dt=2 +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: alter table partition_test_partitioned add columns (value2 string) +PREHOOK: type: ALTERTABLE_ADDCOLS 
+PREHOOK: Input: default@partition_test_partitioned +PREHOOK: Output: default@partition_test_partitioned +POSTHOOK: query: alter table partition_test_partitioned add columns (value2 string) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@partition_test_partitioned +POSTHOOK: Output: default@partition_test_partitioned +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: select key+key, value from partition_test_partitioned where dt is not null +PREHOOK: type: QUERY +PREHOOK: Input: default@partition_test_partitioned@dt=1 +PREHOOK: Input: default@partition_test_partitioned@dt=2 +#### A masked pattern was here #### +POSTHOOK: query: select key+key, value from partition_test_partitioned where dt is not null +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partition_test_partitioned@dt=1 +POSTHOOK: Input: default@partition_test_partitioned@dt=2 +#### A masked pattern was here #### +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, 
comment:default), ] +476 val_238 +476 val_238 +194 val_97 +194 val_97 +PREHOOK: query: select * from partition_test_partitioned where dt is not null +PREHOOK: type: QUERY +PREHOOK: Input: default@partition_test_partitioned@dt=1 +PREHOOK: Input: default@partition_test_partitioned@dt=2 +#### A masked pattern was here #### +POSTHOOK: query: select * from partition_test_partitioned where dt is not null +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partition_test_partitioned@dt=1 +POSTHOOK: Input: default@partition_test_partitioned@dt=2 +#### A masked pattern was here #### +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +238 val_238 NULL 1 +238 val_238 NULL 1 +97 val_97 NULL 2 +97 val_97 NULL 2 +PREHOOK: query: insert overwrite table partition_test_partitioned partition(dt='3') select key, value, value from src where key = 200 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@partition_test_partitioned@dt=3 +POSTHOOK: query: insert overwrite table partition_test_partitioned partition(dt='3') select key, value, value from src where key = 200 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@partition_test_partitioned@dt=3 +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] 
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=3).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=3).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=3).value2 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: select key+key, value, value2 from partition_test_partitioned where dt is not null +PREHOOK: type: QUERY +PREHOOK: Input: default@partition_test_partitioned@dt=1 +PREHOOK: Input: default@partition_test_partitioned@dt=2 +PREHOOK: Input: default@partition_test_partitioned@dt=3 +#### A masked pattern was here #### +POSTHOOK: query: select key+key, value, value2 from partition_test_partitioned where dt is not null +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partition_test_partitioned@dt=1 +POSTHOOK: Input: default@partition_test_partitioned@dt=2 +POSTHOOK: Input: default@partition_test_partitioned@dt=3 +#### A masked pattern was here #### +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned 
PARTITION(dt=3).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=3).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=3).value2 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +476 val_238 NULL +476 val_238 NULL +194 val_97 NULL +194 val_97 NULL +400 val_200 val_200 +400 val_200 val_200 +PREHOOK: query: select * from partition_test_partitioned where dt is not null +PREHOOK: type: QUERY +PREHOOK: Input: default@partition_test_partitioned@dt=1 +PREHOOK: Input: default@partition_test_partitioned@dt=2 +PREHOOK: Input: default@partition_test_partitioned@dt=3 +#### A masked pattern was here #### +POSTHOOK: query: select * from partition_test_partitioned where dt is not null +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partition_test_partitioned@dt=1 +POSTHOOK: Input: default@partition_test_partitioned@dt=2 +POSTHOOK: Input: default@partition_test_partitioned@dt=3 +#### A masked pattern was here #### +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=3).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=3).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: 
partition_test_partitioned PARTITION(dt=3).value2 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +238 val_238 NULL 1 +238 val_238 NULL 1 +97 val_97 NULL 2 +97 val_97 NULL 2 +200 val_200 val_200 3 +200 val_200 val_200 3 Index: ql/src/test/results/clientpositive/groupby_map_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_map_ppr.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/groupby_map_ppr.q.out (working copy) @@ -87,7 +87,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -134,7 +133,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 Index: ql/src/test/results/clientpositive/bucketcontext_7.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketcontext_7.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucketcontext_7.q.out (working copy) @@ -175,7 +175,6 @@ partition values: ds 2008-04-08 properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,value @@ -183,7 +182,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 2 numRows 0 partition_columns ds rawDataSize 0 @@ -225,7 +223,6 @@ partition values: ds 2008-04-09 properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,value @@ -233,7 +230,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 2 numRows 0 partition_columns ds rawDataSize 0 @@ -419,7 +415,6 @@ partition values: ds 2008-04-08 properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,value @@ -427,7 +422,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 2 numRows 0 
partition_columns ds rawDataSize 0 @@ -469,7 +463,6 @@ partition values: ds 2008-04-09 properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,value @@ -477,7 +470,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 2 numRows 0 partition_columns ds rawDataSize 0 Index: ql/src/test/results/clientpositive/combine2_hadoop20.q.out =================================================================== --- ql/src/test/results/clientpositive/combine2_hadoop20.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/combine2_hadoop20.q.out (working copy) @@ -248,7 +248,6 @@ #### A masked pattern was here #### name default.combine2 numFiles 1 - numPartitions 8 numRows 1 partition_columns value rawDataSize 2 @@ -294,7 +293,6 @@ #### A masked pattern was here #### name default.combine2 numFiles 1 - numPartitions 8 numRows 3 partition_columns value rawDataSize 3 @@ -340,7 +338,6 @@ #### A masked pattern was here #### name default.combine2 numFiles 1 - numPartitions 8 numRows 1 partition_columns value rawDataSize 1 @@ -386,7 +383,6 @@ #### A masked pattern was here #### name default.combine2 numFiles 1 - numPartitions 8 numRows 1 partition_columns value rawDataSize 1 @@ -432,7 +428,6 @@ #### A masked pattern was here #### name default.combine2 numFiles 1 - numPartitions 8 numRows 3 partition_columns value rawDataSize 3 @@ -478,7 +473,6 @@ #### A masked pattern was here #### name default.combine2 numFiles 1 - numPartitions 8 numRows 1 partition_columns value rawDataSize 1 @@ -524,7 +518,6 @@ #### A masked pattern was here #### name default.combine2 numFiles 1 - numPartitions 8 numRows 1 partition_columns value rawDataSize 1 @@ -570,7 +563,6 @@ #### A masked pattern was here #### name default.combine2 numFiles 1 - numPartitions 8 numRows 1 partition_columns value rawDataSize 2 Index: ql/src/test/results/clientpositive/rand_partitionpruner1.q.out 
=================================================================== --- ql/src/test/results/clientpositive/rand_partitionpruner1.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/rand_partitionpruner1.q.out (working copy) @@ -62,16 +62,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -93,7 +87,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Truncated Path -> Alias: /src [src] Index: ql/src/test/results/clientpositive/bucketcontext_2.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketcontext_2.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucketcontext_2.q.out (working copy) @@ -150,7 +150,6 @@ partition values: ds 2008-04-08 properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,value @@ -158,7 +157,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 2 numRows 0 partition_columns ds rawDataSize 0 @@ -200,7 +198,6 @@ partition values: ds 2008-04-09 properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,value @@ -208,7 +205,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 2 numRows 0 partition_columns ds rawDataSize 0 @@ -392,7 +388,6 @@ partition values: ds 2008-04-08 properties: - SORTBUCKETCOLSPREFIX TRUE 
bucket_count 2 bucket_field_name key columns key,value @@ -400,7 +395,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 2 numRows 0 partition_columns ds rawDataSize 0 @@ -442,7 +436,6 @@ partition values: ds 2008-04-09 properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,value @@ -450,7 +443,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 2 numRows 0 partition_columns ds rawDataSize 0 Index: ql/src/test/results/clientpositive/bucket2.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket2.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucket2.q.out (working copy) @@ -59,16 +59,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -90,7 +84,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Reduce Operator Tree: Extract Select Operator Index: ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out (working copy) @@ -97,7 +97,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 
numRows 0 partition_columns ds/hr rawDataSize 0 @@ -144,7 +143,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 Index: ql/src/test/results/clientpositive/bucketmapjoin7.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin7.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucketmapjoin7.q.out (working copy) @@ -162,7 +162,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 numFiles 2 - numPartitions 1 numRows 0 partition_columns ds/hr rawDataSize 0 Index: ql/src/test/results/clientpositive/bucketmapjoin11.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin11.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucketmapjoin11.q.out (working copy) @@ -212,7 +212,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 numFiles 2 - numPartitions 2 numRows 0 partition_columns part rawDataSize 0 @@ -260,7 +259,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 numFiles 4 - numPartitions 2 numRows 0 partition_columns part rawDataSize 0 @@ -493,7 +491,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 numFiles 2 - numPartitions 2 numRows 0 partition_columns part rawDataSize 0 @@ -541,7 +538,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 numFiles 4 - numPartitions 2 numRows 0 partition_columns part rawDataSize 0 Index: ql/src/test/results/clientpositive/join26.q.out =================================================================== --- ql/src/test/results/clientpositive/join26.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/join26.q.out (working copy) @@ -156,7 +156,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 
numRows 0 partition_columns ds/hr rawDataSize 0 Index: ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out (working copy) @@ -172,16 +172,10 @@ columns key,value columns.types int:string #### A masked pattern was here #### - name default.srcbucket_mapjoin - numFiles 2 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin { i32 key, string value} + name null.null + serialization.ddl struct null { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -204,7 +198,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin - name: default.srcbucket_mapjoin + name: null.null Truncated Path -> Alias: /srcbucket_mapjoin [a] Index: ql/src/test/results/clientpositive/bucketmapjoin2.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin2.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucketmapjoin2.q.out (working copy) @@ -198,7 +198,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part numFiles 4 - numPartitions 1 numRows 0 partition_columns ds rawDataSize 0 @@ -701,7 +700,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 numFiles 2 - numPartitions 1 numRows 0 partition_columns ds rawDataSize 0 @@ -1398,7 +1396,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part numFiles 4 - numPartitions 1 numRows 0 partition_columns ds rawDataSize 0 Index: 
ql/src/test/results/clientpositive/join_map_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/join_map_ppr.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/join_map_ppr.q.out (working copy) @@ -162,7 +162,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -727,7 +726,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 Index: ql/src/test/results/clientpositive/stats0.q.out =================================================================== --- ql/src/test/results/clientpositive/stats0.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/stats0.q.out (working copy) @@ -72,16 +72,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -103,7 +97,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Truncated Path -> Alias: /src [src] @@ -1418,16 +1412,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1449,7 +1437,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Truncated Path -> Alias: /src [src] Index: ql/src/test/results/clientpositive/smb_mapjoin_11.q.out =================================================================== --- ql/src/test/results/clientpositive/smb_mapjoin_11.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/smb_mapjoin_11.q.out (working copy) @@ -136,7 +136,6 @@ partition values: ds 1 properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name key columns key,value @@ -144,7 +143,6 @@ #### A masked pattern was here #### name default.test_table1 numFiles 16 - numPartitions 1 numRows 500 partition_columns ds rawDataSize 5312 Index: ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out =================================================================== --- ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out (working copy) @@ -79,7 +79,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -126,7 +125,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -270,7 +268,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -317,7 +314,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -364,7 +360,6 @@ #### A masked pattern was here #### name 
default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -411,7 +406,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 Index: ql/src/test/results/clientpositive/sample6.q.out =================================================================== --- ql/src/test/results/clientpositive/sample6.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/sample6.q.out (working copy) @@ -85,16 +85,10 @@ columns key,value columns.types int:string #### A masked pattern was here #### - name default.srcbucket - numFiles 2 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket { i32 key, string value} + name null.null + serialization.ddl struct null { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11603 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -117,7 +111,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket - name: default.srcbucket + name: null.null Truncated Path -> Alias: /srcbucket/srcbucket0.txt [s] @@ -629,16 +623,10 @@ columns key,value columns.types int:string #### A masked pattern was here #### - name default.srcbucket - numFiles 2 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket { i32 key, string value} + name null.null + serialization.ddl struct null { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11603 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -661,7 +649,7 @@ #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket - name: default.srcbucket + name: null.null Reduce Operator Tree: Extract File Output Operator @@ -1003,16 +991,10 @@ columns key,value columns.types int:string #### A masked pattern was here #### - name default.srcbucket - numFiles 2 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket { i32 key, string value} + name null.null + serialization.ddl struct null { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11603 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1035,7 +1017,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket - name: default.srcbucket + name: null.null Reduce Operator Tree: Extract File Output Operator @@ -1631,16 +1613,10 @@ columns key,value columns.types int:string #### A masked pattern was here #### - name default.srcbucket - numFiles 2 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket { i32 key, string value} + name null.null + serialization.ddl struct null { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11603 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1663,7 +1639,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket - name: default.srcbucket + name: null.null Reduce Operator Tree: Extract File Output Operator @@ -2102,16 +2078,10 @@ columns key,value columns.types int:string #### A masked pattern was here #### - name default.srcbucket - numFiles 2 - numPartitions 0 - numRows 0 - rawDataSize 0 - 
serialization.ddl struct srcbucket { i32 key, string value} + name null.null + serialization.ddl struct null { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11603 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -2134,7 +2104,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket - name: default.srcbucket + name: null.null Reduce Operator Tree: Extract File Output Operator @@ -2559,16 +2529,10 @@ columns key,value columns.types int:string #### A masked pattern was here #### - name default.srcbucket2 - numFiles 4 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket2 { i32 key, string value} + name null.null + serialization.ddl struct null { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -2591,7 +2555,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket2 - name: default.srcbucket2 + name: null.null #### A masked pattern was here #### Partition base file name: srcbucket22.txt @@ -2603,16 +2567,10 @@ columns key,value columns.types int:string #### A masked pattern was here #### - name default.srcbucket2 - numFiles 4 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket2 { i32 key, string value} + name null.null + serialization.ddl struct null { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -2635,7 +2593,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket2 - name: default.srcbucket2 + name: null.null Reduce Operator Tree: Extract File Output Operator @@ -2861,16 +2819,10 @@ columns key,value columns.types int:string #### A masked pattern was here #### - name default.srcbucket2 - numFiles 4 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket2 { i32 key, string value} + name null.null + serialization.ddl struct null { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -2893,7 +2845,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket2 - name: default.srcbucket2 + name: null.null Reduce Operator Tree: Extract File Output Operator Index: ql/src/test/results/clientpositive/bucket_map_join_1.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket_map_join_1.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucket_map_join_1.q.out (working copy) @@ -122,22 +122,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 1 bucket_field_name key columns key,value columns.types string:string #### A masked pattern was here #### - name default.table1 - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct table1 { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} 
serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 20 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -161,7 +154,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.table1 - name: default.table1 + name: null.null Truncated Path -> Alias: /table1 [a] Index: ql/src/test/results/clientpositive/sample1.q.out =================================================================== --- ql/src/test/results/clientpositive/sample1.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/sample1.q.out (working copy) @@ -106,7 +106,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 Index: ql/src/test/results/clientpositive/reduce_deduplicate.q.out =================================================================== --- ql/src/test/results/clientpositive/reduce_deduplicate.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/reduce_deduplicate.q.out (working copy) @@ -62,16 +62,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -93,7 +87,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Reduce Operator Tree: Extract Select Operator Index: 
ql/src/test/results/clientpositive/partition_wise_fileformat8.q.out =================================================================== --- ql/src/test/results/clientpositive/partition_wise_fileformat8.q.out (revision 0) +++ ql/src/test/results/clientpositive/partition_wise_fileformat8.q.out (working copy) @@ -0,0 +1,143 @@ +PREHOOK: query: -- This tests that a query can span multiple partitions which can not only have different file formats, but +-- also different serdes +create table partition_test_partitioned(key string, value string) partitioned by (dt string) stored as rcfile +PREHOOK: type: CREATETABLE +POSTHOOK: query: -- This tests that a query can span multiple partitions which can not only have different file formats, but +-- also different serdes +create table partition_test_partitioned(key string, value string) partitioned by (dt string) stored as rcfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@partition_test_partitioned +PREHOOK: query: insert overwrite table partition_test_partitioned partition(dt='1') select * from src +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@partition_test_partitioned@dt=1 +POSTHOOK: query: insert overwrite table partition_test_partitioned partition(dt='1') select * from src +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@partition_test_partitioned@dt=1 +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: alter table partition_test_partitioned set fileformat sequencefile +PREHOOK: type: ALTERTABLE_FILEFORMAT +PREHOOK: Input: default@partition_test_partitioned +PREHOOK: Output: default@partition_test_partitioned +POSTHOOK: query: alter table partition_test_partitioned set fileformat sequencefile +POSTHOOK: 
type: ALTERTABLE_FILEFORMAT +POSTHOOK: Input: default@partition_test_partitioned +POSTHOOK: Output: default@partition_test_partitioned +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: insert overwrite table partition_test_partitioned partition(dt='2') select * from src +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@partition_test_partitioned@dt=2 +POSTHOOK: query: insert overwrite table partition_test_partitioned partition(dt='2') select * from src +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@partition_test_partitioned@dt=2 +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: alter table partition_test_partitioned set serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' +PREHOOK: type: ALTERTABLE_SERIALIZER +PREHOOK: Input: default@partition_test_partitioned +PREHOOK: Output: default@partition_test_partitioned +POSTHOOK: query: alter table partition_test_partitioned set serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' +POSTHOOK: type: ALTERTABLE_SERIALIZER +POSTHOOK: Input: default@partition_test_partitioned +POSTHOOK: Output: default@partition_test_partitioned +POSTHOOK: Lineage: partition_test_partitioned 
PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: insert overwrite table partition_test_partitioned partition(dt='3') select * from src +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@partition_test_partitioned@dt=3 +POSTHOOK: query: insert overwrite table partition_test_partitioned partition(dt='3') select * from src +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@partition_test_partitioned@dt=3 +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=3).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=3).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: select * from partition_test_partitioned where dt is not null order by key, value, dt limit 20 +PREHOOK: type: QUERY +PREHOOK: Input: default@partition_test_partitioned@dt=1 +PREHOOK: Input: 
default@partition_test_partitioned@dt=2 +PREHOOK: Input: default@partition_test_partitioned@dt=3 +#### A masked pattern was here #### +POSTHOOK: query: select * from partition_test_partitioned where dt is not null order by key, value, dt limit 20 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partition_test_partitioned@dt=1 +POSTHOOK: Input: default@partition_test_partitioned@dt=2 +POSTHOOK: Input: default@partition_test_partitioned@dt=3 +#### A masked pattern was here #### +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=3).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=3).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +0 val_0 1 +0 val_0 1 +0 val_0 1 +0 val_0 2 +0 val_0 2 +0 val_0 2 +0 val_0 3 +0 val_0 3 +0 val_0 3 +10 val_10 1 +10 val_10 2 +10 val_10 3 +100 val_100 1 +100 val_100 1 +100 val_100 2 +100 val_100 2 +100 val_100 3 +100 val_100 3 +103 val_103 1 +103 val_103 1 +PREHOOK: query: select key+key as key, value, dt from partition_test_partitioned where dt is not null order by key, value, dt limit 20 +PREHOOK: type: QUERY +PREHOOK: Input: default@partition_test_partitioned@dt=1 +PREHOOK: Input: default@partition_test_partitioned@dt=2 +PREHOOK: Input: default@partition_test_partitioned@dt=3 +#### A masked pattern was here #### +POSTHOOK: query: select key+key as key, value, 
dt from partition_test_partitioned where dt is not null order by key, value, dt limit 20 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partition_test_partitioned@dt=1 +POSTHOOK: Input: default@partition_test_partitioned@dt=2 +POSTHOOK: Input: default@partition_test_partitioned@dt=3 +#### A masked pattern was here #### +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=3).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=3).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +0.0 val_0 1 +0.0 val_0 1 +0.0 val_0 1 +0.0 val_0 2 +0.0 val_0 2 +0.0 val_0 2 +0.0 val_0 3 +0.0 val_0 3 +0.0 val_0 3 +4.0 val_2 1 +4.0 val_2 2 +4.0 val_2 3 +8.0 val_4 1 +8.0 val_4 2 +8.0 val_4 3 +10.0 val_5 1 +10.0 val_5 1 +10.0 val_5 1 +10.0 val_5 2 +10.0 val_5 2 Index: ql/src/test/results/clientpositive/sort_merge_join_desc_7.q.out =================================================================== --- ql/src/test/results/clientpositive/sort_merge_join_desc_7.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/sort_merge_join_desc_7.q.out (working copy) @@ -215,7 +215,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 numFiles 2 - numPartitions 2 numRows 500 partition_columns part rawDataSize 5312 @@ -263,7 +262,6 @@ #### A masked pattern was here #### name 
default.srcbucket_mapjoin_part_1 numFiles 2 - numPartitions 2 numRows 500 partition_columns part rawDataSize 5312 Index: ql/src/test/results/clientpositive/union22.q.out =================================================================== --- ql/src/test/results/clientpositive/union22.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/union22.q.out (working copy) @@ -194,7 +194,6 @@ #### A masked pattern was here #### name default.dst_union22 numFiles 1 - numPartitions 1 numRows 500 partition_columns ds rawDataSize 11124 @@ -439,7 +438,6 @@ #### A masked pattern was here #### name default.dst_union22_delta numFiles 1 - numPartitions 1 numRows 500 partition_columns ds rawDataSize 16936 Index: ql/src/test/results/clientpositive/join32.q.out =================================================================== --- ql/src/test/results/clientpositive/join32.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/join32.q.out (working copy) @@ -103,16 +103,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -134,7 +128,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Truncated Path -> Alias: /src [y] Index: ql/src/test/results/clientpositive/input_part1.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part1.q.out (revision 1426896) +++ 
ql/src/test/results/clientpositive/input_part1.q.out (working copy) @@ -102,7 +102,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 Index: ql/src/test/results/clientpositive/partition_wise_fileformat11.q.out =================================================================== --- ql/src/test/results/clientpositive/partition_wise_fileformat11.q.out (revision 0) +++ ql/src/test/results/clientpositive/partition_wise_fileformat11.q.out (working copy) @@ -0,0 +1,117 @@ +PREHOOK: query: -- This tests that the schema can be changed for binary serde data +create table partition_test_partitioned(key string, value string) partitioned by (dt string) stored as rcfile +PREHOOK: type: CREATETABLE +POSTHOOK: query: -- This tests that the schema can be changed for binary serde data +create table partition_test_partitioned(key string, value string) partitioned by (dt string) stored as rcfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@partition_test_partitioned +PREHOOK: query: alter table partition_test_partitioned set serde 'org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe' +PREHOOK: type: ALTERTABLE_SERIALIZER +PREHOOK: Input: default@partition_test_partitioned +PREHOOK: Output: default@partition_test_partitioned +POSTHOOK: query: alter table partition_test_partitioned set serde 'org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe' +POSTHOOK: type: ALTERTABLE_SERIALIZER +POSTHOOK: Input: default@partition_test_partitioned +POSTHOOK: Output: default@partition_test_partitioned +PREHOOK: query: insert overwrite table partition_test_partitioned partition(dt='1') select * from src where key = 238 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@partition_test_partitioned@dt=1 +POSTHOOK: query: insert overwrite table partition_test_partitioned partition(dt='1') select * from src where key = 238 +POSTHOOK: type: QUERY +POSTHOOK: 
Input: default@src +POSTHOOK: Output: default@partition_test_partitioned@dt=1 +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: select * from partition_test_partitioned where dt is not null +PREHOOK: type: QUERY +PREHOOK: Input: default@partition_test_partitioned@dt=1 +#### A masked pattern was here #### +POSTHOOK: query: select * from partition_test_partitioned where dt is not null +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partition_test_partitioned@dt=1 +#### A masked pattern was here #### +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +238 val_238 1 +238 val_238 1 +PREHOOK: query: select key+key, value from partition_test_partitioned where dt is not null +PREHOOK: type: QUERY +PREHOOK: Input: default@partition_test_partitioned@dt=1 +#### A masked pattern was here #### +POSTHOOK: query: select key+key, value from partition_test_partitioned where dt is not null +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partition_test_partitioned@dt=1 +#### A masked pattern was here #### +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +476.0 val_238 +476.0 val_238 +PREHOOK: query: alter table partition_test_partitioned change key key int +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: default@partition_test_partitioned +PREHOOK: Output: 
default@partition_test_partitioned +POSTHOOK: query: alter table partition_test_partitioned change key key int +POSTHOOK: type: ALTERTABLE_RENAMECOL +POSTHOOK: Input: default@partition_test_partitioned +POSTHOOK: Output: default@partition_test_partitioned +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: select key+key, value from partition_test_partitioned where dt is not null +PREHOOK: type: QUERY +PREHOOK: Input: default@partition_test_partitioned@dt=1 +#### A masked pattern was here #### +POSTHOOK: query: select key+key, value from partition_test_partitioned where dt is not null +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partition_test_partitioned@dt=1 +#### A masked pattern was here #### +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +476 val_238 +476 val_238 +PREHOOK: query: select * from partition_test_partitioned where dt is not null +PREHOOK: type: QUERY +PREHOOK: Input: default@partition_test_partitioned@dt=1 +#### A masked pattern was here #### +POSTHOOK: query: select * from partition_test_partitioned where dt is not null +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partition_test_partitioned@dt=1 +#### A masked pattern was here #### +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +238 val_238 1 +238 val_238 1 +PREHOOK: query: alter table 
partition_test_partitioned add columns (value2 string) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@partition_test_partitioned +PREHOOK: Output: default@partition_test_partitioned +POSTHOOK: query: alter table partition_test_partitioned add columns (value2 string) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@partition_test_partitioned +POSTHOOK: Output: default@partition_test_partitioned +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: select key+key, value from partition_test_partitioned where dt is not null +PREHOOK: type: QUERY +PREHOOK: Input: default@partition_test_partitioned@dt=1 +#### A masked pattern was here #### +POSTHOOK: query: select key+key, value from partition_test_partitioned where dt is not null +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partition_test_partitioned@dt=1 +#### A masked pattern was here #### +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +476 val_238 +476 val_238 +PREHOOK: query: select * from partition_test_partitioned where dt is not null +PREHOOK: type: QUERY +PREHOOK: Input: default@partition_test_partitioned@dt=1 +#### A masked pattern was here #### +POSTHOOK: query: select * from partition_test_partitioned where dt is not null +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partition_test_partitioned@dt=1 +#### A masked pattern was here #### +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: 
partition_test_partitioned PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +238 val_238 NULL 1 +238 val_238 NULL 1 Index: ql/src/test/results/clientpositive/transform_ppr2.q.out =================================================================== --- ql/src/test/results/clientpositive/transform_ppr2.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/transform_ppr2.q.out (working copy) @@ -92,7 +92,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -139,7 +138,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 Index: ql/src/test/results/clientpositive/union_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/union_ppr.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/union_ppr.q.out (working copy) @@ -152,7 +152,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -199,7 +198,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 Index: ql/src/test/results/clientpositive/ctas_hadoop20.q.out =================================================================== --- ql/src/test/results/clientpositive/ctas_hadoop20.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/ctas_hadoop20.q.out (working copy) @@ -777,16 +777,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -808,7 +802,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Reduce Operator Tree: Extract Limit Index: ql/src/test/results/clientpositive/bucketcontext_6.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketcontext_6.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucketcontext_6.q.out (working copy) @@ -149,7 +149,6 @@ partition values: ds 2008-04-08 properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,value @@ -157,7 +156,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 2 numRows 0 partition_columns ds rawDataSize 0 @@ -199,7 +197,6 @@ partition values: ds 2008-04-09 properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,value @@ -207,7 +204,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 2 numRows 0 partition_columns ds rawDataSize 0 @@ -391,7 +387,6 @@ partition values: ds 2008-04-08 properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,value @@ -399,7 +394,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 2 numRows 0 partition_columns ds rawDataSize 0 @@ -441,7 +435,6 @@ partition values: ds 2008-04-09 properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,value @@ -449,7 +442,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 2 numRows 0 partition_columns ds rawDataSize 0 Index: ql/src/test/results/clientpositive/bucketcontext_1.q.out 
=================================================================== --- ql/src/test/results/clientpositive/bucketcontext_1.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucketcontext_1.q.out (working copy) @@ -162,7 +162,6 @@ partition values: ds 2008-04-08 properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key columns key,value @@ -170,7 +169,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 4 - numPartitions 2 numRows 0 partition_columns ds rawDataSize 0 @@ -212,7 +210,6 @@ partition values: ds 2008-04-09 properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key columns key,value @@ -220,7 +217,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 4 - numPartitions 2 numRows 0 partition_columns ds rawDataSize 0 @@ -404,7 +400,6 @@ partition values: ds 2008-04-08 properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key columns key,value @@ -412,7 +407,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 4 - numPartitions 2 numRows 0 partition_columns ds rawDataSize 0 @@ -454,7 +448,6 @@ partition values: ds 2008-04-09 properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key columns key,value @@ -462,7 +455,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 4 - numPartitions 2 numRows 0 partition_columns ds rawDataSize 0 Index: ql/src/test/results/clientpositive/bucket1.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket1.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucket1.q.out (working copy) @@ -59,16 +59,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, 
string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -90,7 +84,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Reduce Operator Tree: Extract Select Operator Index: ql/src/test/results/clientpositive/bucketmapjoin10.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin10.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucketmapjoin10.q.out (working copy) @@ -199,7 +199,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 numFiles 2 - numPartitions 2 numRows 0 partition_columns part rawDataSize 0 @@ -247,7 +246,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 numFiles 3 - numPartitions 2 numRows 0 partition_columns part rawDataSize 0 Index: ql/src/test/results/clientpositive/columnstats_tbllvl.q.out =================================================================== --- ql/src/test/results/clientpositive/columnstats_tbllvl.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/columnstats_tbllvl.q.out (working copy) @@ -176,16 +176,10 @@ columns.types string:string:string:float:string:string:string:string:int field.delim | #### A masked pattern was here #### - name default.uservisits_web_text_none - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct uservisits_web_text_none { string sourceip, string desturl, string visitdate, float adrevenue, string useragent, string ccode, string lcode, string skeyword, i32 avgtimeonsite} + name null.null + serialization.ddl struct null { string sourceip, string desturl, string visitdate, float adrevenue, string useragent, 
string ccode, string lcode, string skeyword, i32 avgtimeonsite} serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 7060 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -208,7 +202,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.uservisits_web_text_none - name: default.uservisits_web_text_none + name: null.null Reduce Operator Tree: Group By Operator aggregations: Index: ql/src/test/results/clientpositive/smb_mapjoin_15.q.out =================================================================== --- ql/src/test/results/clientpositive/smb_mapjoin_15.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/smb_mapjoin_15.q.out (working copy) @@ -99,22 +99,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name key columns key,value columns.types int:string #### A masked pattern was here #### - name default.test_table1 - numFiles 16 - numPartitions 0 - numRows 500 - rawDataSize 5312 - serialization.ddl struct test_table1 { i32 key, string value} + name null.null + serialization.ddl struct null { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -138,7 +131,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table1 - name: default.test_table1 + name: null.null Truncated Path -> Alias: /test_table1 [a] @@ -394,22 +387,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name key columns key,key2,value columns.types int:int:string #### A masked pattern was here #### - name default.test_table1 - numFiles 16 - numPartitions 0 - numRows 500 - rawDataSize 7218 - serialization.ddl struct test_table1 { i32 key, i32 key2, string value} + name null.null + serialization.ddl struct null { i32 key, i32 key2, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 7718 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -433,7 +419,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table1 - name: default.test_table1 + name: null.null Truncated Path -> Alias: /test_table1 [a] @@ -537,6 +523,7 @@ Fetch Operator limit: 10 + PREHOOK: query: SELECT /*+mapjoin(b)*/ * FROM test_table1 a JOIN test_table2 b ON a.key = b.key and a.key2 = b.key2 ORDER BY a.key LIMIT 10 PREHOOK: type: QUERY PREHOOK: Input: default@test_table1 @@ -640,22 +627,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name key columns key,key2,value columns.types int:int:string #### A masked pattern was here #### - name default.test_table1 - numFiles 16 - numPartitions 0 - numRows 500 - rawDataSize 7218 - serialization.ddl struct test_table1 { i32 key, i32 key2, string value} + name null.null + serialization.ddl struct null { i32 key, i32 key2, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 7718 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: 
org.apache.hadoop.mapred.TextInputFormat @@ -679,7 +659,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table1 - name: default.test_table1 + name: null.null Truncated Path -> Alias: /test_table1 [a] @@ -783,6 +763,7 @@ Fetch Operator limit: 10 + PREHOOK: query: SELECT /*+mapjoin(b)*/ * FROM test_table1 a JOIN test_table2 b ON a.key2 = b.key2 and a.key = b.key ORDER BY a.key LIMIT 10 PREHOOK: type: QUERY PREHOOK: Input: default@test_table1 @@ -917,22 +898,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name key columns key,key2,value columns.types int:int:string #### A masked pattern was here #### - name default.test_table1 - numFiles 16 - numPartitions 0 - numRows 500 - rawDataSize 7218 - serialization.ddl struct test_table1 { i32 key, i32 key2, string value} + name null.null + serialization.ddl struct null { i32 key, i32 key2, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 7718 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -956,7 +930,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table1 - name: default.test_table1 + name: null.null Truncated Path -> Alias: /test_table1 [a] Index: ql/src/test/results/clientpositive/bucketmapjoin1.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin1.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucketmapjoin1.q.out (working copy) @@ -465,16 +465,10 @@ columns key,value columns.types int:string #### A masked pattern was here #### - name default.srcbucket_mapjoin - numFiles 2 
- numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin { i32 key, string value} + name null.null + serialization.ddl struct null { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -497,7 +491,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin - name: default.srcbucket_mapjoin + name: null.null Truncated Path -> Alias: /srcbucket_mapjoin [a] @@ -970,7 +964,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part numFiles 4 - numPartitions 1 numRows 0 partition_columns ds rawDataSize 0 @@ -1214,7 +1207,6 @@ hdfs directory: true #### A masked pattern was here #### - PREHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(a)*/ a.key, a.value, b.value from srcbucket_mapjoin a join srcbucket_mapjoin_part b Index: ql/src/test/results/clientpositive/sample10.q.out =================================================================== --- ql/src/test/results/clientpositive/sample10.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/sample10.q.out (working copy) @@ -113,7 +113,6 @@ #### A masked pattern was here #### name default.srcpartbucket numFiles 4 - numPartitions 4 numRows 10 partition_columns ds/hr rawDataSize 60 @@ -162,7 +161,6 @@ #### A masked pattern was here #### name default.srcpartbucket numFiles 4 - numPartitions 4 numRows 10 partition_columns ds/hr rawDataSize 60 @@ -211,7 +209,6 @@ #### A masked pattern was here #### name default.srcpartbucket numFiles 4 - numPartitions 4 numRows 10 partition_columns ds/hr rawDataSize 60 @@ -260,7 +257,6 @@ #### A masked pattern was here #### name default.srcpartbucket numFiles 4 - numPartitions 4 numRows 10 
partition_columns ds/hr rawDataSize 60 Index: ql/src/test/results/clientpositive/udf_java_method.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_java_method.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/udf_java_method.q.out (working copy) @@ -99,16 +99,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -130,7 +124,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Truncated Path -> Alias: /src [src] Index: ql/src/test/results/clientpositive/sample5.q.out =================================================================== --- ql/src/test/results/clientpositive/sample5.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/sample5.q.out (working copy) @@ -85,16 +85,10 @@ columns key,value columns.types int:string #### A masked pattern was here #### - name default.srcbucket - numFiles 2 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket { i32 key, string value} + name null.null + serialization.ddl struct null { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11603 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -117,7 +111,7 @@ #### A masked pattern was 
here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket - name: default.srcbucket + name: null.null Truncated Path -> Alias: /srcbucket [s] Index: ql/src/test/results/clientpositive/udf_explode.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_explode.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/udf_explode.q.out (working copy) @@ -66,16 +66,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -97,7 +91,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Truncated Path -> Alias: /src [src] @@ -154,16 +148,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -185,7 +173,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Reduce Operator Tree: 
Extract Limit @@ -390,16 +378,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -421,7 +403,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Truncated Path -> Alias: /src [src] @@ -480,16 +462,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -511,7 +487,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Reduce Operator Tree: Extract Limit @@ -636,7 +612,6 @@ Fetch Operator limit: -1 - PREHOOK: query: SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src LIMIT 3 PREHOOK: type: QUERY PREHOOK: Input: default@src Index: ql/src/test/results/clientpositive/sort_merge_join_desc_6.q.out =================================================================== --- ql/src/test/results/clientpositive/sort_merge_join_desc_6.q.out (revision 1426896) +++ 
ql/src/test/results/clientpositive/sort_merge_join_desc_6.q.out (working copy) @@ -151,7 +151,6 @@ partition values: part 1 properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,value @@ -159,7 +158,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 numFiles 2 - numPartitions 1 numRows 500 partition_columns part rawDataSize 5312 Index: ql/src/test/results/clientpositive/partition_wise_fileformat10.q.out =================================================================== --- ql/src/test/results/clientpositive/partition_wise_fileformat10.q.out (revision 0) +++ ql/src/test/results/clientpositive/partition_wise_fileformat10.q.out (working copy) @@ -0,0 +1,75 @@ +PREHOOK: query: -- This tests that the schema can be changed for binary serde data +create table prt(key string, value string) partitioned by (dt string) +PREHOOK: type: CREATETABLE +POSTHOOK: query: -- This tests that the schema can be changed for binary serde data +create table prt(key string, value string) partitioned by (dt string) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@prt +PREHOOK: query: insert overwrite table prt partition(dt='1') select * from src where key = 238 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@prt@dt=1 +POSTHOOK: query: insert overwrite table prt partition(dt='1') select * from src where key = 238 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@prt@dt=1 +POSTHOOK: Lineage: prt PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: prt PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: select * from prt where dt is not null +PREHOOK: type: QUERY +PREHOOK: Input: default@prt@dt=1 +#### A masked pattern was here #### +POSTHOOK: query: select * from prt where dt is not null +POSTHOOK: type: QUERY +POSTHOOK: Input: default@prt@dt=1 
+#### A masked pattern was here #### +POSTHOOK: Lineage: prt PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: prt PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +238 val_238 1 +238 val_238 1 +PREHOOK: query: select key+key, value from prt where dt is not null +PREHOOK: type: QUERY +PREHOOK: Input: default@prt@dt=1 +#### A masked pattern was here #### +POSTHOOK: query: select key+key, value from prt where dt is not null +POSTHOOK: type: QUERY +POSTHOOK: Input: default@prt@dt=1 +#### A masked pattern was here #### +POSTHOOK: Lineage: prt PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: prt PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +476.0 val_238 +476.0 val_238 +PREHOOK: query: alter table prt add columns (value2 string) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@prt +PREHOOK: Output: default@prt +POSTHOOK: query: alter table prt add columns (value2 string) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@prt +POSTHOOK: Output: default@prt +POSTHOOK: Lineage: prt PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: prt PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: select key+key, value from prt where dt is not null +PREHOOK: type: QUERY +PREHOOK: Input: default@prt@dt=1 +#### A masked pattern was here #### +POSTHOOK: query: select key+key, value from prt where dt is not null +POSTHOOK: type: QUERY +POSTHOOK: Input: default@prt@dt=1 +#### A masked pattern was here #### +POSTHOOK: Lineage: prt PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: prt PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, 
comment:default), ] +476.0 val_238 +476.0 val_238 +PREHOOK: query: select * from prt where dt is not null +PREHOOK: type: QUERY +PREHOOK: Input: default@prt@dt=1 +#### A masked pattern was here #### +POSTHOOK: query: select * from prt where dt is not null +POSTHOOK: type: QUERY +POSTHOOK: Input: default@prt@dt=1 +#### A masked pattern was here #### +POSTHOOK: Lineage: prt PARTITION(dt=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: prt PARTITION(dt=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +238 val_238 NULL 1 +238 val_238 NULL 1 Index: ql/src/test/results/clientpositive/list_bucket_dml_10.q.out =================================================================== --- ql/src/test/results/clientpositive/list_bucket_dml_10.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/list_bucket_dml_10.q.out (working copy) @@ -116,16 +116,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -147,7 +141,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Truncated Path -> Alias: /src [src] Index: ql/src/test/results/clientpositive/transform_ppr1.q.out =================================================================== --- ql/src/test/results/clientpositive/transform_ppr1.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/transform_ppr1.q.out (working copy) @@ 
-90,7 +90,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -137,7 +136,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -184,7 +182,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 @@ -231,7 +228,6 @@ #### A masked pattern was here #### name default.srcpart numFiles 1 - numPartitions 4 numRows 0 partition_columns ds/hr rawDataSize 0 Index: ql/src/test/results/clientpositive/regexp_extract.q.out =================================================================== --- ql/src/test/results/clientpositive/regexp_extract.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/regexp_extract.q.out (working copy) @@ -86,16 +86,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -117,7 +111,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Reduce Operator Tree: Extract Select Operator @@ -344,16 +338,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string 
value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -375,7 +363,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Reduce Operator Tree: Extract Select Operator Index: ql/src/test/results/clientpositive/ppd_union_view.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_union_view.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/ppd_union_view.q.out (working copy) @@ -260,7 +260,6 @@ #### A masked pattern was here #### name default.t1_mapping numFiles 1 - numPartitions 2 numRows 1 partition_columns ds rawDataSize 12 @@ -306,7 +305,6 @@ #### A masked pattern was here #### name default.t1_old numFiles 1 - numPartitions 2 numRows 1 partition_columns ds rawDataSize 14 @@ -791,7 +789,6 @@ #### A masked pattern was here #### name default.t1_new numFiles 1 - numPartitions 2 numRows 1 partition_columns ds rawDataSize 11 Index: ql/src/test/results/clientpositive/bucketcontext_5.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketcontext_5.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucketcontext_5.q.out (working copy) @@ -133,22 +133,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,value columns.types string:string #### A masked pattern was here #### - name default.bucket_big - numFiles 2 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucket_big { string key, string value} + name null.null + 
serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -172,7 +165,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big - name: default.bucket_big + name: null.null Truncated Path -> Alias: /bucket_big [b] @@ -318,22 +311,15 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key columns key,value columns.types string:string #### A masked pattern was here #### - name default.bucket_big - numFiles 2 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucket_big { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -357,7 +343,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big - name: default.bucket_big + name: null.null Truncated Path -> Alias: /bucket_big [b] Index: ql/src/test/results/clientpositive/bucket5.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket5.q.out (revision 1426896) +++ ql/src/test/results/clientpositive/bucket5.q.out (working copy) @@ -105,16 +105,10 @@ columns key,value columns.types string:string #### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - 
rawDataSize 0 - serialization.ddl struct src { string key, string value} + name null.null + serialization.ddl struct null { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -136,7 +130,7 @@ #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src - name: default.src + name: null.null Reduce Operator Tree: Extract Select Operator Index: ql/src/test/results/compiler/plan/input2.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input2.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/input2.q.xml (working copy) @@ -1598,19 +1598,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -1621,18 +1617,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -1645,10 +1629,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1656,10 +1636,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -2807,19 +2783,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -2830,18 +2802,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -2854,10 +2814,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -2865,10 +2821,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/join3.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join3.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/join3.q.xml (working copy) @@ -179,19 +179,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -202,18 +198,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -226,10 +210,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -237,10 +217,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -351,19 +327,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -374,18 +346,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -398,10 +358,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -409,10 +365,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -523,19 +475,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -546,18 +494,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - 
numPartitions - 0 - - bucket_count -1 @@ -570,10 +506,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -581,10 +513,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -1623,19 +1551,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -1646,18 +1570,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -1670,10 +1582,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1681,10 +1589,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/input4.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input4.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/input4.q.xml (working copy) @@ -179,19 +179,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -202,18 +198,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -226,10 +210,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -237,10 +217,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -1076,19 +1052,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, 
string value} + struct null { string key, string value} serialization.format @@ -1099,18 +1071,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -1123,10 +1083,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1134,10 +1090,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/join5.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join5.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/join5.q.xml (working copy) @@ -26,19 +26,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -49,18 +45,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -73,10 +57,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -84,10 +64,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -198,19 +174,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -221,18 +193,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -245,10 +205,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -256,10 +212,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ 
-1566,19 +1518,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -1589,18 +1537,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -1613,10 +1549,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1624,10 +1556,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/input6.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input6.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/input6.q.xml (working copy) @@ -551,19 +551,15 @@ name - default.src1 + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src1 { string key, string value} + struct null { string key, string value} serialization.format @@ -574,18 +570,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -598,10 +582,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 216 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -609,10 +589,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -1133,19 +1109,15 @@ name - default.src1 + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src1 { string key, string value} + struct null { string key, string value} serialization.format @@ -1156,18 +1128,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -1180,10 +1140,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 216 - - file.outputformat 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1191,10 +1147,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/input_testxpath2.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input_testxpath2.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/input_testxpath2.q.xml (working copy) @@ -26,19 +26,15 @@ name - default.src_thrift + null.null - numFiles - 1 - - columns.types serialization.ddl - struct src_thrift { } + struct null { } columns @@ -49,18 +45,6 @@ org.apache.thrift.protocol.TBinaryProtocol - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex @@ -77,10 +61,6 @@ org.apache.hadoop.mapred.SequenceFileInputFormat - totalSize - 1606 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -88,10 +68,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -914,19 +890,15 @@ name - default.src_thrift + null.null - numFiles - 1 - - columns.types serialization.ddl - struct src_thrift { } + struct null { } columns @@ -937,18 +909,6 @@ org.apache.thrift.protocol.TBinaryProtocol - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex @@ -965,10 +925,6 @@ org.apache.hadoop.mapred.SequenceFileInputFormat - totalSize - 1606 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -976,10 +932,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/join7.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join7.q.xml (revision 1426896) +++ 
ql/src/test/results/compiler/plan/join7.q.xml (working copy) @@ -26,19 +26,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -49,18 +45,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -73,10 +57,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -84,10 +64,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -198,19 +174,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -221,18 +193,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -245,10 +205,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -256,10 +212,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -370,19 +322,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -393,18 +341,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -417,10 +353,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -428,10 +360,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -2314,19 +2242,15 @@ name - default.src + null.null - numFiles 
- 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -2337,18 +2261,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -2361,10 +2273,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -2372,10 +2280,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/input8.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input8.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/input8.q.xml (working copy) @@ -26,19 +26,15 @@ name - default.src1 + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src1 { string key, string value} + struct null { string key, string value} serialization.format @@ -49,18 +45,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -73,10 +57,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 216 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -84,10 +64,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -714,19 +690,15 @@ name - default.src1 + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src1 { string key, string value} + struct null { string key, string value} serialization.format @@ -737,18 +709,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -761,10 +721,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 216 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -772,10 +728,6 @@ location #### A masked pattern was 
here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/union.q.xml =================================================================== --- ql/src/test/results/compiler/plan/union.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/union.q.xml (working copy) @@ -424,19 +424,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -447,18 +443,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -471,10 +455,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -482,10 +462,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -596,19 +572,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -619,18 +591,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -643,10 +603,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -654,10 +610,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -1716,19 +1668,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -1739,18 +1687,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -1763,10 +1699,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 
5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1774,10 +1706,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/udf4.q.xml =================================================================== --- ql/src/test/results/compiler/plan/udf4.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/udf4.q.xml (working copy) @@ -26,7 +26,7 @@ name - default.dest1 + null.null columns.types @@ -34,7 +34,7 @@ serialization.ddl - struct dest1 { string key, string value} + struct null { string key, string value} serialization.format @@ -64,10 +64,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -1867,7 +1863,7 @@ name - default.dest1 + null.null columns.types @@ -1875,7 +1871,7 @@ serialization.ddl - struct dest1 { string key, string value} + struct null { string key, string value} serialization.format @@ -1905,10 +1901,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/udf6.q.xml =================================================================== --- ql/src/test/results/compiler/plan/udf6.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/udf6.q.xml (working copy) @@ -26,19 +26,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -49,18 +45,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -73,10 +57,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -84,10 +64,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A 
masked pattern was here #### - @@ -645,19 +621,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -668,18 +640,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -692,10 +652,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -703,10 +659,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/input_part1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input_part1.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/input_part1.q.xml (working copy) @@ -39,7 +39,7 @@ numFiles - 4 + 1 columns.types @@ -66,10 +66,6 @@ 0 - numPartitions - 4 - - partition_columns ds/hr @@ -87,7 +83,7 @@ totalSize - 23248 + 5812 file.outputformat @@ -865,10 +861,6 @@ 0 - numPartitions - 4 - - partition_columns ds/hr Index: ql/src/test/results/compiler/plan/groupby2.q.xml =================================================================== --- ql/src/test/results/compiler/plan/groupby2.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/groupby2.q.xml (working copy) @@ -26,19 +26,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -49,18 +45,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -73,10 +57,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -84,10 +64,6 @@ location #### A masked pattern was here #### - 
- transient_lastDdlTime - #### A masked pattern was here #### - @@ -1166,19 +1142,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -1189,18 +1161,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -1213,10 +1173,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1224,10 +1180,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/groupby4.q.xml =================================================================== --- ql/src/test/results/compiler/plan/groupby4.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/groupby4.q.xml (working copy) @@ -26,19 +26,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -49,18 +45,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -73,10 +57,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -84,10 +64,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -815,19 +791,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -838,18 +810,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -862,10 +822,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - 
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -873,10 +829,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/groupby6.q.xml =================================================================== --- ql/src/test/results/compiler/plan/groupby6.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/groupby6.q.xml (working copy) @@ -26,19 +26,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -49,18 +45,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -73,10 +57,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -84,10 +64,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -815,19 +791,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -838,18 +810,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -862,10 +822,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -873,10 +829,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/case_sensitivity.q.xml =================================================================== --- ql/src/test/results/compiler/plan/case_sensitivity.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/case_sensitivity.q.xml 
(working copy) @@ -551,19 +551,15 @@ name - default.src_thrift + null.null - numFiles - 1 - - columns.types serialization.ddl - struct src_thrift { } + struct null { } columns @@ -574,18 +570,6 @@ org.apache.thrift.protocol.TBinaryProtocol - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex @@ -602,10 +586,6 @@ org.apache.hadoop.mapred.SequenceFileInputFormat - totalSize - 1606 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -613,10 +593,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -1332,19 +1308,15 @@ name - default.src_thrift + null.null - numFiles - 1 - - columns.types serialization.ddl - struct src_thrift { } + struct null { } columns @@ -1355,18 +1327,6 @@ org.apache.thrift.protocol.TBinaryProtocol - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex @@ -1383,10 +1343,6 @@ org.apache.hadoop.mapred.SequenceFileInputFormat - totalSize - 1606 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -1394,10 +1350,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/sample1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample1.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/sample1.q.xml (working copy) @@ -39,7 +39,7 @@ numFiles - 4 + 1 columns.types @@ -66,10 +66,6 @@ 0 - numPartitions - 4 - - partition_columns ds/hr @@ -87,7 +83,7 @@ totalSize - 23248 + 5812 file.outputformat @@ -986,10 +982,6 @@ 0 - numPartitions - 4 - - partition_columns ds/hr Index: ql/src/test/results/compiler/plan/sample3.q.xml =================================================================== --- 
ql/src/test/results/compiler/plan/sample3.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/sample3.q.xml (working copy) @@ -551,13 +551,9 @@ name - default.srcbucket + null.null - numFiles - 2 - - columns.types int:string @@ -567,7 +563,7 @@ serialization.ddl - struct srcbucket { i32 key, string value} + struct null { i32 key, string value} columns @@ -578,18 +574,6 @@ 1 - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count 2 @@ -602,10 +586,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 11603 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -613,10 +593,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -1269,13 +1245,9 @@ name - default.srcbucket + null.null - numFiles - 2 - - columns.types int:string @@ -1285,7 +1257,7 @@ serialization.ddl - struct srcbucket { i32 key, string value} + struct null { i32 key, string value} columns @@ -1296,18 +1268,6 @@ 1 - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count 2 @@ -1320,10 +1280,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 11603 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1331,10 +1287,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/sample5.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample5.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/sample5.q.xml (working copy) @@ -551,13 +551,9 @@ name - default.srcbucket + null.null - numFiles - 2 - - columns.types int:string @@ -567,7 +563,7 @@ serialization.ddl - struct srcbucket { i32 key, string value} + struct null { i32 key, string value} columns @@ -578,18 +574,6 @@ 1 - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count 2 @@ 
-602,10 +586,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 11603 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -613,10 +593,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -1256,13 +1232,9 @@ name - default.srcbucket + null.null - numFiles - 2 - - columns.types int:string @@ -1272,7 +1244,7 @@ serialization.ddl - struct srcbucket { i32 key, string value} + struct null { i32 key, string value} columns @@ -1283,18 +1255,6 @@ 1 - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count 2 @@ -1307,10 +1267,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 11603 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1318,10 +1274,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/sample7.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample7.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/sample7.q.xml (working copy) @@ -551,13 +551,9 @@ name - default.srcbucket + null.null - numFiles - 2 - - columns.types int:string @@ -567,7 +563,7 @@ serialization.ddl - struct srcbucket { i32 key, string value} + struct null { i32 key, string value} columns @@ -578,18 +574,6 @@ 1 - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count 2 @@ -602,10 +586,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 11603 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -613,10 +593,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -1341,13 +1317,9 @@ name - default.srcbucket + null.null - numFiles - 2 - - columns.types int:string @@ -1357,7 +1329,7 @@ serialization.ddl - struct srcbucket { i32 key, string 
value} + struct null { i32 key, string value} columns @@ -1368,18 +1340,6 @@ 1 - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count 2 @@ -1392,10 +1352,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 11603 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1403,10 +1359,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/cast1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/cast1.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/cast1.q.xml (working copy) @@ -26,19 +26,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -49,18 +45,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -73,10 +57,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -84,10 +64,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -1139,19 +1115,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -1162,18 +1134,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -1186,10 +1146,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1197,10 +1153,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: 
ql/src/test/results/compiler/plan/input1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input1.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/input1.q.xml (working copy) @@ -551,19 +551,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -574,18 +570,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -598,10 +582,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -609,10 +589,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -1147,19 +1123,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -1170,18 +1142,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -1194,10 +1154,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1205,10 +1161,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/join2.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join2.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/join2.q.xml (working copy) @@ -190,19 +190,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ 
-213,18 +209,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -237,10 +221,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -248,10 +228,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -1101,19 +1077,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -1124,18 +1096,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -1148,10 +1108,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1159,10 +1115,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -1282,7 +1234,7 @@ 200 - + 1 @@ -1770,19 +1722,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -1793,18 +1741,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -1817,10 +1753,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1828,10 +1760,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -1942,19 +1870,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -1965,18 +1889,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - 
- - numPartitions - 0 - - bucket_count -1 @@ -1989,10 +1901,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -2000,10 +1908,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -2738,19 +2642,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -2761,18 +2661,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -2785,10 +2673,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -2796,10 +2680,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/input3.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input3.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/input3.q.xml (working copy) @@ -1975,19 +1975,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -1998,18 +1994,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -2022,10 +2006,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -2033,10 +2013,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -3473,19 +3449,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct 
src { string key, string value} + struct null { string key, string value} serialization.format @@ -3496,18 +3468,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -3520,10 +3480,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -3531,10 +3487,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/join4.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join4.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/join4.q.xml (working copy) @@ -26,19 +26,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -49,18 +45,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -73,10 +57,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -84,10 +64,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -198,19 +174,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -221,18 +193,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -245,10 +205,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -256,10 +212,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here 
#### - @@ -1566,19 +1518,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -1589,18 +1537,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -1613,10 +1549,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1624,10 +1556,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/input5.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input5.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/input5.q.xml (working copy) @@ -179,19 +179,15 @@ name - default.src_thrift + null.null - numFiles - 1 - - columns.types serialization.ddl - struct src_thrift { } + struct null { } columns @@ -202,18 +198,6 @@ org.apache.thrift.protocol.TBinaryProtocol - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex @@ -230,10 +214,6 @@ org.apache.hadoop.mapred.SequenceFileInputFormat - totalSize - 1606 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -241,10 +221,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -1106,19 +1082,15 @@ name - default.src_thrift + null.null - numFiles - 1 - - columns.types serialization.ddl - struct src_thrift { } + struct null { } columns @@ -1129,18 +1101,6 @@ org.apache.thrift.protocol.TBinaryProtocol - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex @@ -1157,10 +1117,6 @@ org.apache.hadoop.mapred.SequenceFileInputFormat - 
totalSize - 1606 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -1168,10 +1124,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/join6.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join6.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/join6.q.xml (working copy) @@ -26,19 +26,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -49,18 +45,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -73,10 +57,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -84,10 +64,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -198,19 +174,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -221,18 +193,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -245,10 +205,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -256,10 +212,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -1566,19 +1518,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -1589,18 +1537,6 @@ key,value - 
rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -1613,10 +1549,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1624,10 +1556,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/input7.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input7.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/input7.q.xml (working copy) @@ -551,19 +551,15 @@ name - default.src1 + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src1 { string key, string value} + struct null { string key, string value} serialization.format @@ -574,18 +570,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -598,10 +582,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 216 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -609,10 +589,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -1049,19 +1025,15 @@ name - default.src1 + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src1 { string key, string value} + struct null { string key, string value} serialization.format @@ -1072,18 +1044,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -1096,10 +1056,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 216 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1107,10 +1063,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/input_testsequencefile.q.xml 
=================================================================== --- ql/src/test/results/compiler/plan/input_testsequencefile.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/input_testsequencefile.q.xml (working copy) @@ -551,19 +551,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -574,18 +570,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -598,10 +582,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -609,10 +589,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -1057,19 +1033,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -1080,18 +1052,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -1104,10 +1064,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1115,10 +1071,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/join8.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join8.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/join8.q.xml (working copy) @@ -26,19 +26,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -49,18 +45,6 @@ 
key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -73,10 +57,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -84,10 +64,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -198,19 +174,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -221,18 +193,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -245,10 +205,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -256,10 +212,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -1648,19 +1600,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -1671,18 +1619,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -1695,10 +1631,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1706,10 +1638,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/input9.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input9.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/input9.q.xml (working copy) @@ -551,19 +551,15 @@ name - default.src1 + null.null - numFiles - 1 - - columns.types 
string:string serialization.ddl - struct src1 { string key, string value} + struct null { string key, string value} serialization.format @@ -574,18 +570,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -598,10 +582,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 216 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -609,10 +589,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -1126,19 +1102,15 @@ name - default.src1 + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src1 { string key, string value} + struct null { string key, string value} serialization.format @@ -1149,18 +1121,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -1173,10 +1133,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 216 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1184,10 +1140,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/udf1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/udf1.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/udf1.q.xml (working copy) @@ -26,19 +26,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -49,18 +45,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -73,10 +57,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -84,10 +64,6 @@ location #### A masked pattern was here #### - - 
transient_lastDdlTime - #### A masked pattern was here #### - @@ -1965,19 +1941,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -1988,18 +1960,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -2012,10 +1972,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -2023,10 +1979,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/input_testxpath.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input_testxpath.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/input_testxpath.q.xml (working copy) @@ -26,19 +26,15 @@ name - default.src_thrift + null.null - numFiles - 1 - - columns.types serialization.ddl - struct src_thrift { } + struct null { } columns @@ -49,18 +45,6 @@ org.apache.thrift.protocol.TBinaryProtocol - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex @@ -77,10 +61,6 @@ org.apache.hadoop.mapred.SequenceFileInputFormat - totalSize - 1606 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -88,10 +68,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -826,19 +802,15 @@ name - default.src_thrift + null.null - numFiles - 1 - - columns.types serialization.ddl - struct src_thrift { } + struct null { } columns @@ -849,18 +821,6 @@ org.apache.thrift.protocol.TBinaryProtocol - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex @@ 
-877,10 +837,6 @@ org.apache.hadoop.mapred.SequenceFileInputFormat - totalSize - 1606 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -888,10 +844,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/groupby1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/groupby1.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/groupby1.q.xml (working copy) @@ -179,19 +179,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -202,18 +198,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -226,10 +210,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -237,10 +217,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -1064,19 +1040,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -1087,18 +1059,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -1111,10 +1071,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1122,10 +1078,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/udf_case.q.xml =================================================================== --- 
ql/src/test/results/compiler/plan/udf_case.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/udf_case.q.xml (working copy) @@ -26,19 +26,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -49,18 +45,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -73,10 +57,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -84,10 +64,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -739,19 +715,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -762,18 +734,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -786,10 +746,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -797,10 +753,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/subq.q.xml =================================================================== --- ql/src/test/results/compiler/plan/subq.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/subq.q.xml (working copy) @@ -424,19 +424,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -447,18 +443,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -471,10 +455,6 @@ 
org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -482,10 +462,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -1144,19 +1120,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -1167,18 +1139,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -1191,10 +1151,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1202,10 +1158,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/groupby3.q.xml =================================================================== --- ql/src/test/results/compiler/plan/groupby3.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/groupby3.q.xml (working copy) @@ -26,19 +26,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -49,18 +45,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -73,10 +57,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -84,10 +64,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -1365,19 +1341,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} 
serialization.format @@ -1388,18 +1360,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -1412,10 +1372,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1423,10 +1379,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/groupby5.q.xml =================================================================== --- ql/src/test/results/compiler/plan/groupby5.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/groupby5.q.xml (working copy) @@ -26,19 +26,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -49,18 +45,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -73,10 +57,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -84,10 +64,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -908,19 +884,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -931,18 +903,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -955,10 +915,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -966,10 +922,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/udf_when.q.xml 
=================================================================== --- ql/src/test/results/compiler/plan/udf_when.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/udf_when.q.xml (working copy) @@ -26,19 +26,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -49,18 +45,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -73,10 +57,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -84,10 +64,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -819,19 +795,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -842,18 +814,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -866,10 +826,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -877,10 +833,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/input20.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input20.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/input20.q.xml (working copy) @@ -26,19 +26,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -49,18 +45,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 
- - - numPartitions - 0 - - bucket_count -1 @@ -73,10 +57,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -84,10 +64,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -923,19 +899,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -946,18 +918,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -970,10 +930,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -981,10 +937,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/sample2.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample2.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/sample2.q.xml (working copy) @@ -551,13 +551,9 @@ name - default.srcbucket + null.null - numFiles - 2 - - columns.types int:string @@ -567,7 +563,7 @@ serialization.ddl - struct srcbucket { i32 key, string value} + struct null { i32 key, string value} columns @@ -578,18 +574,6 @@ 1 - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count 2 @@ -602,10 +586,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 11603 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -613,10 +593,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -1259,13 +1235,9 @@ name - default.srcbucket + null.null - numFiles - 2 - - columns.types int:string @@ -1275,7 +1247,7 @@ serialization.ddl - 
struct srcbucket { i32 key, string value} + struct null { i32 key, string value} columns @@ -1286,18 +1258,6 @@ 1 - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count 2 @@ -1310,10 +1270,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 11603 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1321,10 +1277,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/sample4.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample4.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/sample4.q.xml (working copy) @@ -551,13 +551,9 @@ name - default.srcbucket + null.null - numFiles - 2 - - columns.types int:string @@ -567,7 +563,7 @@ serialization.ddl - struct srcbucket { i32 key, string value} + struct null { i32 key, string value} columns @@ -578,18 +574,6 @@ 1 - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count 2 @@ -602,10 +586,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 11603 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -613,10 +593,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -1259,13 +1235,9 @@ name - default.srcbucket + null.null - numFiles - 2 - - columns.types int:string @@ -1275,7 +1247,7 @@ serialization.ddl - struct srcbucket { i32 key, string value} + struct null { i32 key, string value} columns @@ -1286,18 +1258,6 @@ 1 - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count 2 @@ -1310,10 +1270,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 11603 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1321,10 +1277,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked 
pattern was here #### - Index: ql/src/test/results/compiler/plan/sample6.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample6.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/sample6.q.xml (working copy) @@ -551,13 +551,9 @@ name - default.srcbucket + null.null - numFiles - 2 - - columns.types int:string @@ -567,7 +563,7 @@ serialization.ddl - struct srcbucket { i32 key, string value} + struct null { i32 key, string value} columns @@ -578,18 +574,6 @@ 1 - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count 2 @@ -602,10 +586,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 11603 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -613,10 +593,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -1259,13 +1235,9 @@ name - default.srcbucket + null.null - numFiles - 2 - - columns.types int:string @@ -1275,7 +1247,7 @@ serialization.ddl - struct srcbucket { i32 key, string value} + struct null { i32 key, string value} columns @@ -1286,18 +1258,6 @@ 1 - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count 2 @@ -1310,10 +1270,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 11603 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1321,10 +1277,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/results/compiler/plan/join1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join1.q.xml (revision 1426896) +++ ql/src/test/results/compiler/plan/join1.q.xml (working copy) @@ -179,19 +179,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string 
value} serialization.format @@ -202,18 +198,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -226,10 +210,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -237,10 +217,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -351,19 +327,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -374,18 +346,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -398,10 +358,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -409,10 +365,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - @@ -1175,19 +1127,15 @@ name - default.src + null.null - numFiles - 1 - - columns.types string:string serialization.ddl - struct src { string key, string value} + struct null { string key, string value} serialization.format @@ -1198,18 +1146,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 0 - - bucket_count -1 @@ -1222,10 +1158,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 5812 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1233,10 +1165,6 @@ location #### A masked pattern was here #### - - transient_lastDdlTime - #### A masked pattern was here #### - Index: ql/src/test/queries/clientpositive/partition_wise_fileformat8.q =================================================================== --- ql/src/test/queries/clientpositive/partition_wise_fileformat8.q (revision 0) +++ ql/src/test/queries/clientpositive/partition_wise_fileformat8.q (working copy) 
@@ -0,0 +1,13 @@ +set hive.input.format = org.apache.hadoop.hive.ql.io.CombineHiveInputFormat; + +-- This tests that a query can span multiple partitions which can not only have different file formats, but +-- also different serdes +create table partition_test_partitioned(key string, value string) partitioned by (dt string) stored as rcfile; +insert overwrite table partition_test_partitioned partition(dt='1') select * from src; +alter table partition_test_partitioned set fileformat sequencefile; +insert overwrite table partition_test_partitioned partition(dt='2') select * from src; +alter table partition_test_partitioned set serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'; +insert overwrite table partition_test_partitioned partition(dt='3') select * from src; + +select * from partition_test_partitioned where dt is not null order by key, value, dt limit 20; +select key+key as key, value, dt from partition_test_partitioned where dt is not null order by key, value, dt limit 20; Index: ql/src/test/queries/clientpositive/partition_wise_fileformat12.q =================================================================== --- ql/src/test/queries/clientpositive/partition_wise_fileformat12.q (revision 0) +++ ql/src/test/queries/clientpositive/partition_wise_fileformat12.q (working copy) @@ -0,0 +1,26 @@ +set hive.input.format = org.apache.hadoop.hive.ql.io.CombineHiveInputFormat; + +-- This tests that the schema can be changed for binary serde data +create table partition_test_partitioned(key string, value string) partitioned by (dt string) stored as rcfile; +alter table partition_test_partitioned set serde 'org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe'; +insert overwrite table partition_test_partitioned partition(dt='1') select * from src where key = 238; + +select * from partition_test_partitioned where dt is not null; +select key+key, value from partition_test_partitioned where dt is not null; + +alter table partition_test_partitioned change key 
key int; + +select key+key, value from partition_test_partitioned where dt is not null; +select * from partition_test_partitioned where dt is not null; + +insert overwrite table partition_test_partitioned partition(dt='2') select * from src where key = 97; + +alter table partition_test_partitioned add columns (value2 string); + +select key+key, value from partition_test_partitioned where dt is not null; +select * from partition_test_partitioned where dt is not null; + +insert overwrite table partition_test_partitioned partition(dt='3') select key, value, value from src where key = 200; + +select key+key, value, value2 from partition_test_partitioned where dt is not null; +select * from partition_test_partitioned where dt is not null; Index: ql/src/test/queries/clientpositive/partition_wise_fileformat9.q =================================================================== --- ql/src/test/queries/clientpositive/partition_wise_fileformat9.q (revision 0) +++ ql/src/test/queries/clientpositive/partition_wise_fileformat9.q (working copy) @@ -0,0 +1,11 @@ +set hive.input.format = org.apache.hadoop.hive.ql.io.CombineHiveInputFormat; + +-- This tests that a query can span multiple partitions which can not only have different file formats, but +-- also different serdes +create table partition_test_partitioned(key string, value string) partitioned by (dt string) stored as rcfile; +insert overwrite table partition_test_partitioned partition(dt='1') select * from src; +alter table partition_test_partitioned set serde 'org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe'; +insert overwrite table partition_test_partitioned partition(dt='2') select * from src; + +select * from partition_test_partitioned where dt is not null order by key, value, dt limit 20; +select key+key as key, value, dt from partition_test_partitioned where dt is not null order by key, value, dt limit 20; \ No newline at end of file Index: 
ql/src/test/queries/clientpositive/partition_wise_fileformat10.q =================================================================== --- ql/src/test/queries/clientpositive/partition_wise_fileformat10.q (revision 0) +++ ql/src/test/queries/clientpositive/partition_wise_fileformat10.q (working copy) @@ -0,0 +1,13 @@ +set hive.input.format = org.apache.hadoop.hive.ql.io.CombineHiveInputFormat; + +-- This tests that the schema can be changed for binary serde data +create table prt(key string, value string) partitioned by (dt string); +insert overwrite table prt partition(dt='1') select * from src where key = 238; + +select * from prt where dt is not null; +select key+key, value from prt where dt is not null; + +alter table prt add columns (value2 string); + +select key+key, value from prt where dt is not null; +select * from prt where dt is not null; Index: ql/src/test/queries/clientpositive/partition_wise_fileformat11.q =================================================================== --- ql/src/test/queries/clientpositive/partition_wise_fileformat11.q (revision 0) +++ ql/src/test/queries/clientpositive/partition_wise_fileformat11.q (working copy) @@ -0,0 +1,19 @@ +set hive.input.format = org.apache.hadoop.hive.ql.io.CombineHiveInputFormat; + +-- This tests that the schema can be changed for binary serde data +create table partition_test_partitioned(key string, value string) partitioned by (dt string) stored as rcfile; +alter table partition_test_partitioned set serde 'org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe'; +insert overwrite table partition_test_partitioned partition(dt='1') select * from src where key = 238; + +select * from partition_test_partitioned where dt is not null; +select key+key, value from partition_test_partitioned where dt is not null; + +alter table partition_test_partitioned change key key int; + +select key+key, value from partition_test_partitioned where dt is not null; +select * from partition_test_partitioned where dt is 
not null; + +alter table partition_test_partitioned add columns (value2 string); + +select key+key, value from partition_test_partitioned where dt is not null; +select * from partition_test_partitioned where dt is not null; Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java (revision 1426896) +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java (working copy) @@ -793,7 +793,7 @@ partDir.add(p); try { - partDesc.add(Utilities.getPartitionDescFromTableDesc(tblDesc, part)); + partDesc.add(Utilities.getPartitionDesc(part)); } catch (HiveException e) { LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e)); throw new SemanticException(e.getMessage(), e); Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java (revision 1426896) +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java (working copy) @@ -214,7 +214,7 @@ getInputFormatClass(); // This will set up field: outputFormatClass getOutputFormatClass(); - + getDeserializer(); } public String getName() { @@ -276,6 +276,10 @@ return MetaStoreUtils.getSchema(tPartition, table.getTTable()); } + public Properties getSchemaFromPartitionSchema() { + return MetaStoreUtils.getPartitionSchema(tPartition, table.getTTable()); + } + public Properties getSchemaFromTableSchema(Properties tblSchema) { return MetaStoreUtils.getPartSchemaFromTableSchema(tPartition.getSd(), table.getTTable().getSd(), tPartition.getParameters(), table.getDbName(), table.getTableName(), table.getPartitionKeys(), Index: ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java 
(revision 1426896) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java (working copy) @@ -38,6 +38,7 @@ import org.apache.hadoop.hive.ql.plan.MapredWork; import org.apache.hadoop.hive.ql.plan.OperatorDesc; import org.apache.hadoop.hive.ql.plan.PartitionDesc; +import org.apache.hadoop.hive.ql.plan.TableDesc; import org.apache.hadoop.hive.ql.plan.TableScanDesc; import org.apache.hadoop.hive.ql.plan.api.OperatorType; import org.apache.hadoop.hive.serde2.Deserializer; @@ -45,6 +46,8 @@ import org.apache.hadoop.hive.serde2.SerDeStats; import org.apache.hadoop.hive.serde2.SerDeUtils; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; @@ -78,7 +81,9 @@ private transient Writable[] vcValues; private transient List vcs; private transient Object[] rowWithPartAndVC; - private transient StructObjectInspector rowObjectInspector; + private transient StructObjectInspector tblRowObjectInspector; + // convert from partition to table schema + private transient Converter partTblObjectInspectorConverter; private transient boolean isPartitioned; private transient boolean hasVC; private Map opCtxMap; @@ -141,15 +146,16 @@ } private static class MapOpCtx { - boolean isPartitioned; - StructObjectInspector rawRowObjectInspector; // without partition - StructObjectInspector partObjectInspector; // partition - StructObjectInspector rowObjectInspector; - Object[] rowWithPart; - Object[] rowWithPartAndVC; - Deserializer deserializer; - public String tableName; - public String partName; + private final boolean isPartitioned; + private final 
StructObjectInspector tblRawRowObjectInspector; // without partition + private final StructObjectInspector partObjectInspector; // partition + private StructObjectInspector rowObjectInspector; + private final Converter partTblObjectInspectorConverter; + private final Object[] rowWithPart; + private Object[] rowWithPartAndVC; + private final Deserializer deserializer; + private String tableName; + private String partName; /** * @param isPartitioned @@ -158,18 +164,20 @@ */ public MapOpCtx(boolean isPartitioned, StructObjectInspector rowObjectInspector, - StructObjectInspector rawRowObjectInspector, + StructObjectInspector tblRawRowObjectInspector, StructObjectInspector partObjectInspector, Object[] rowWithPart, Object[] rowWithPartAndVC, - Deserializer deserializer) { + Deserializer deserializer, + Converter partTblObjectInspectorConverter) { this.isPartitioned = isPartitioned; this.rowObjectInspector = rowObjectInspector; - this.rawRowObjectInspector = rawRowObjectInspector; + this.tblRawRowObjectInspector = tblRawRowObjectInspector; this.partObjectInspector = partObjectInspector; this.rowWithPart = rowWithPart; this.rowWithPartAndVC = rowWithPartAndVC; this.deserializer = deserializer; + this.partTblObjectInspectorConverter = partTblObjectInspectorConverter; } /** @@ -186,6 +194,10 @@ return rowObjectInspector; } + public StructObjectInspector getTblRawRowObjectInspector() { + return tblRawRowObjectInspector; + } + /** * @return the rowWithPart */ @@ -206,6 +218,10 @@ public Deserializer getDeserializer() { return deserializer; } + + public Converter getPartTblObjectInspectorConverter() { + return partTblObjectInspectorConverter; + } } /** @@ -225,38 +241,43 @@ } private MapOpCtx initObjectInspector(MapredWork conf, - Configuration hconf, String onefile) throws HiveException, + Configuration hconf, String onefile, Map convertedOI) + throws HiveException, ClassNotFoundException, InstantiationException, IllegalAccessException, SerDeException { - PartitionDesc td = 
conf.getPathToPartitionInfo().get(onefile); - LinkedHashMap partSpec = td.getPartSpec(); - Properties tblProps = td.getProperties(); + PartitionDesc pd = conf.getPathToPartitionInfo().get(onefile); + LinkedHashMap partSpec = pd.getPartSpec(); + Properties partProps = pd.getProperties(); - Class sdclass = td.getDeserializerClass(); + Class sdclass = pd.getDeserializerClass(); if (sdclass == null) { - String className = td.getSerdeClassName(); + String className = pd.getSerdeClassName(); if ((className == "") || (className == null)) { throw new HiveException( "SerDe class or the SerDe class name is not set for table: " - + td.getProperties().getProperty("name")); + + pd.getProperties().getProperty("name")); } sdclass = hconf.getClassByName(className); } - String tableName = String.valueOf(tblProps.getProperty("name")); + String tableName = String.valueOf(partProps.getProperty("name")); String partName = String.valueOf(partSpec); - // HiveConf.setVar(hconf, HiveConf.ConfVars.HIVETABLENAME, tableName); - // HiveConf.setVar(hconf, HiveConf.ConfVars.HIVEPARTITIONNAME, partName); - Deserializer deserializer = (Deserializer) sdclass.newInstance(); - deserializer.initialize(hconf, tblProps); - StructObjectInspector rawRowObjectInspector = (StructObjectInspector) deserializer + Deserializer partDeserializer = (Deserializer) sdclass.newInstance(); + partDeserializer.initialize(hconf, partProps); + StructObjectInspector partRawRowObjectInspector = (StructObjectInspector) partDeserializer .getObjectInspector(); + StructObjectInspector tblRawRowObjectInspector = convertedOI.get(pd.getTableDesc()); + + partTblObjectInspectorConverter = + ObjectInspectorConverters.getConverter(partRawRowObjectInspector, + tblRawRowObjectInspector); + MapOpCtx opCtx = null; // Next check if this table has partitions and if so // get the list of partition names as well as allocate // the serdes for the partition columns - String pcols = tblProps + String pcols = partProps 
.getProperty(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_PARTITION_COLUMNS); // Log LOG = LogFactory.getLog(MapOperator.class.getName()); if (pcols != null && pcols.length() > 0) { @@ -285,16 +306,16 @@ rowWithPart[1] = partValues; StructObjectInspector rowObjectInspector = ObjectInspectorFactory .getUnionStructObjectInspector(Arrays - .asList(new StructObjectInspector[] {rawRowObjectInspector, partObjectInspector})); + .asList(new StructObjectInspector[] {tblRawRowObjectInspector, partObjectInspector})); // LOG.info("dump " + tableName + " " + partName + " " + // rowObjectInspector.getTypeName()); - opCtx = new MapOpCtx(true, rowObjectInspector, rawRowObjectInspector, partObjectInspector, - rowWithPart, null, deserializer); + opCtx = new MapOpCtx(true, rowObjectInspector, tblRawRowObjectInspector, partObjectInspector, + rowWithPart, null, partDeserializer, partTblObjectInspectorConverter); } else { // LOG.info("dump2 " + tableName + " " + partName + " " + // rowObjectInspector.getTypeName()); - opCtx = new MapOpCtx(false, rawRowObjectInspector, rawRowObjectInspector, null, null, - null, deserializer); + opCtx = new MapOpCtx(false, tblRawRowObjectInspector, tblRawRowObjectInspector, null, null, + null, partDeserializer, partTblObjectInspectorConverter); } opCtx.tableName = tableName; opCtx.partName = partName; @@ -312,7 +333,8 @@ isPartitioned = opCtxMap.get(inp).isPartitioned(); rowWithPart = opCtxMap.get(inp).getRowWithPart(); rowWithPartAndVC = opCtxMap.get(inp).getRowWithPartAndVC(); - rowObjectInspector = opCtxMap.get(inp).getRowObjectInspector(); + tblRowObjectInspector = opCtxMap.get(inp).getRowObjectInspector(); + partTblObjectInspectorConverter = opCtxMap.get(inp).getPartTblObjectInspectorConverter(); if (listInputPaths.contains(inp)) { return; } @@ -320,7 +342,8 @@ listInputPaths.add(inp); if (op instanceof TableScanOperator) { - StructObjectInspector rawRowObjectInspector = opCtxMap.get(inp).rawRowObjectInspector; + 
StructObjectInspector tblRawRowObjectInspector = + opCtxMap.get(inp).getTblRawRowObjectInspector(); StructObjectInspector partObjectInspector = opCtxMap.get(inp).partObjectInspector; TableScanOperator tsOp = (TableScanOperator) op; TableScanDesc tsDesc = tsOp.getConf(); @@ -348,22 +371,140 @@ this.rowWithPartAndVC = new Object[2]; } if (partObjectInspector == null) { - this.rowObjectInspector = ObjectInspectorFactory.getUnionStructObjectInspector(Arrays + this.tblRowObjectInspector = ObjectInspectorFactory.getUnionStructObjectInspector(Arrays .asList(new StructObjectInspector[] { - rowObjectInspector, vcStructObjectInspector})); + tblRowObjectInspector, vcStructObjectInspector})); } else { - this.rowObjectInspector = ObjectInspectorFactory.getUnionStructObjectInspector(Arrays + this.tblRowObjectInspector = ObjectInspectorFactory.getUnionStructObjectInspector(Arrays .asList(new StructObjectInspector[] { - rawRowObjectInspector, partObjectInspector, + tblRawRowObjectInspector, partObjectInspector, vcStructObjectInspector})); } - opCtxMap.get(inp).rowObjectInspector = this.rowObjectInspector; + opCtxMap.get(inp).rowObjectInspector = this.tblRowObjectInspector; opCtxMap.get(inp).rowWithPartAndVC = this.rowWithPartAndVC; } } } } + private boolean isIdentityConverterOK(Configuration hconf) throws HiveException { + Map tableDescOI = + new HashMap(); + try + { + for (String onefile : conf.getPathToAliases().keySet()) { + PartitionDesc pd = conf.getPathToPartitionInfo().get(onefile); + Properties partProps = pd.getProperties(); + TableDesc tableDesc = pd.getTableDesc(); + Properties tblProps = tableDesc.getProperties(); + + Class sdclass = pd.getDeserializerClass(); + if (sdclass == null) { + String className = pd.getSerdeClassName(); + if ((className == "") || (className == null)) { + throw new HiveException( + "SerDe class or the SerDe class name is not set for table: " + + pd.getProperties().getProperty("name")); + } + sdclass = hconf.getClassByName(className); + } + + 
Deserializer partDeserializer = (Deserializer) sdclass.newInstance(); + partDeserializer.initialize(hconf, partProps); + StructObjectInspector partRawRowObjectInspector = (StructObjectInspector) partDeserializer + .getObjectInspector(); + + StructObjectInspector tblRawRowObjectInspector = tableDescOI.get(tableDesc); + if (tblRawRowObjectInspector == null) { + sdclass = tableDesc.getDeserializerClass(); + if (sdclass == null) { + String className = tableDesc.getSerdeClassName(); + if ((className == "") || (className == null)) { + throw new HiveException( + "SerDe class or the SerDe class name is not set for table: " + + tableDesc.getProperties().getProperty("name")); + } + sdclass = hconf.getClassByName(className); + } + Deserializer tblDeserializer = (Deserializer) sdclass.newInstance(); + tblDeserializer.initialize(hconf, tblProps); + tblRawRowObjectInspector = (StructObjectInspector) tblDeserializer.getObjectInspector(); + tableDescOI.put(tableDesc, tblRawRowObjectInspector); + } + + if (partRawRowObjectInspector != tblRawRowObjectInspector) { + return false; + } + } + } catch (Exception e) { + throw new HiveException(e); + } + return true; + } + + private Map getConvertedOI(Configuration hconf) throws HiveException { + Map tableDescOI = + new HashMap(); + Set identityConverterTableDesc = new HashSet(); + try + { + for (String onefile : conf.getPathToAliases().keySet()) { + PartitionDesc pd = conf.getPathToPartitionInfo().get(onefile); + Properties partProps = pd.getProperties(); + TableDesc tableDesc = pd.getTableDesc(); + Properties tblProps = tableDesc.getProperties(); + + Class sdclass = pd.getDeserializerClass(); + if (sdclass == null) { + String className = pd.getSerdeClassName(); + if ((className == "") || (className == null)) { + throw new HiveException( + "SerDe class or the SerDe class name is not set for table: " + + pd.getProperties().getProperty("name")); + } + sdclass = hconf.getClassByName(className); + } + + Deserializer partDeserializer = 
(Deserializer) sdclass.newInstance(); + partDeserializer.initialize(hconf, partProps); + StructObjectInspector partRawRowObjectInspector = (StructObjectInspector) partDeserializer + .getObjectInspector(); + + StructObjectInspector tblRawRowObjectInspector = tableDescOI.get(tableDesc); + if ((tblRawRowObjectInspector == null) || (identityConverterTableDesc.contains(tableDesc))) { + sdclass = tableDesc.getDeserializerClass(); + if (sdclass == null) { + String className = tableDesc.getSerdeClassName(); + if ((className == "") || (className == null)) { + throw new HiveException( + "SerDe class or the SerDe class name is not set for table: " + + tableDesc.getProperties().getProperty("name")); + } + sdclass = hconf.getClassByName(className); + } + Deserializer tblDeserializer = (Deserializer) sdclass.newInstance(); + tblDeserializer.initialize(hconf, tblProps); + tblRawRowObjectInspector = (StructObjectInspector) ObjectInspectorConverters.getConverteredOI( + partRawRowObjectInspector, (StructObjectInspector) tblDeserializer.getObjectInspector()); + + if (identityConverterTableDesc.contains(tableDesc)) { + if (partRawRowObjectInspector != tblRawRowObjectInspector) { + identityConverterTableDesc.remove(tableDesc); + } + } + else if (partRawRowObjectInspector == tblRawRowObjectInspector) { + identityConverterTableDesc.add(tableDesc); + } + + tableDescOI.put(tableDesc, tblRawRowObjectInspector); + } + } + } catch (Exception e) { + throw new HiveException(e); + } + return tableDescOI; + } + public void setChildren(Configuration hconf) throws HiveException { Path fpath = new Path((new Path(HiveConf.getVar(hconf, @@ -375,10 +516,10 @@ operatorToPaths = new HashMap, ArrayList>(); statsMap.put(Counter.DESERIALIZE_ERRORS, deserialize_error_count); - + Map convertedOI = getConvertedOI(hconf); try { for (String onefile : conf.getPathToAliases().keySet()) { - MapOpCtx opCtx = initObjectInspector(conf, hconf, onefile); + MapOpCtx opCtx = initObjectInspector(conf, hconf, onefile, 
convertedOI); Path onepath = new Path(new Path(onefile).toUri().getPath()); List aliases = conf.getPathToAliases().get(onefile); @@ -514,16 +655,18 @@ Object row = null; try { if (this.hasVC) { - this.rowWithPartAndVC[0] = deserializer.deserialize(value); + this.rowWithPartAndVC[0] = + partTblObjectInspectorConverter.convert(deserializer.deserialize(value)); int vcPos = isPartitioned ? 2 : 1; if (context != null) { populateVirtualColumnValues(context, vcs, vcValues, deserializer); } this.rowWithPartAndVC[vcPos] = this.vcValues; } else if (!isPartitioned) { - row = deserializer.deserialize((Writable) value); + row = partTblObjectInspectorConverter.convert(deserializer.deserialize((Writable) value)); } else { - rowWithPart[0] = deserializer.deserialize((Writable) value); + rowWithPart[0] = + partTblObjectInspectorConverter.convert(deserializer.deserialize((Writable) value)); } } catch (Exception e) { // Serialize the row and output. @@ -542,22 +685,22 @@ try { if (this.hasVC) { - forward(this.rowWithPartAndVC, this.rowObjectInspector); + forward(this.rowWithPartAndVC, this.tblRowObjectInspector); } else if (!isPartitioned) { - forward(row, rowObjectInspector); + forward(row, tblRowObjectInspector); } else { - forward(rowWithPart, rowObjectInspector); + forward(rowWithPart, tblRowObjectInspector); } } catch (Exception e) { // Serialize the row and output the error message. 
String rowString; try { if (this.hasVC) { - rowString = SerDeUtils.getJSONString(rowWithPartAndVC, rowObjectInspector); + rowString = SerDeUtils.getJSONString(rowWithPartAndVC, tblRowObjectInspector); } else if (!isPartitioned) { - rowString = SerDeUtils.getJSONString(row, rowObjectInspector); + rowString = SerDeUtils.getJSONString(row, tblRowObjectInspector); } else { - rowString = SerDeUtils.getJSONString(rowWithPart, rowObjectInspector); + rowString = SerDeUtils.getJSONString(rowWithPart, tblRowObjectInspector); } } catch (Exception e2) { rowString = "[Error getting row data with exception " + Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java (revision 1426896) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java (working copy) @@ -109,8 +109,6 @@ dummyOp.setExecContext(execContext); dummyOp.initialize(jc,null); } - - } catch (Throwable e) { abort = true; if (e instanceof OutOfMemoryError) { Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java (revision 1426896) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java (working copy) @@ -50,6 +50,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.DelegatedObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; @@ -92,6 +94,9 @@ private transient Writable value; private transient Writable[] vcValues; private transient Deserializer serde; + private transient Deserializer tblSerde; + Converter partTblObjectInspectorConverter; + private transient Iterator iterPath; private transient Iterator iterPartDesc; private transient Path currPath; @@ -223,17 +228,26 @@ private StructObjectInspector setTableDesc(TableDesc table) throws Exception { Deserializer serde = table.getDeserializerClass().newInstance(); serde.initialize(job, table.getProperties()); - return createRowInspector(getCurrent(serde)); + return createRowInspector(getCurrent(serde.getObjectInspector())); } private StructObjectInspector setPrtnDesc(PartitionDesc partition) throws Exception { Deserializer serde = partition.getDeserializerClass().newInstance(); serde.initialize(job, partition.getProperties()); + + Deserializer tblSerde = partition.getTableDesc().getDeserializerClass().newInstance(); + tblSerde.initialize(job, partition.getTableDesc().getProperties()); + String pcols = partition.getTableDesc().getProperties().getProperty( org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_PARTITION_COLUMNS); String[] partKeys = pcols.trim().split("/"); row[1] = createPartValue(partKeys, partition.getPartSpec()); - return createRowInspector(getCurrent(serde), partKeys); + + ObjectInspector outputOI = ObjectInspectorConverters.getConverterOutputOI( + serde.getObjectInspector(), + tblSerde.getObjectInspector(), true).getFirst(); + + return createRowInspector(getCurrent(outputOI), partKeys); } private StructObjectInspector setPrtnDesc(TableDesc table) throws Exception { @@ -243,11 +257,10 @@ org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_PARTITION_COLUMNS); String[] partKeys = pcols.trim().split("/"); row[1] = null; - return createRowInspector(getCurrent(serde), partKeys); + return 
createRowInspector(getCurrent(serde.getObjectInspector()), partKeys); } - private StructObjectInspector getCurrent(Deserializer serde) throws SerDeException { - ObjectInspector current = serde.getObjectInspector(); + private StructObjectInspector getCurrent(ObjectInspector current) throws SerDeException { if (objectInspector != null) { current = DelegatedObjectInspectorFactory.reset(objectInspector, current); } else { @@ -384,6 +397,18 @@ serde = tmp.getDeserializerClass().newInstance(); serde.initialize(job, tmp.getProperties()); + if (currTbl != null) { + tblSerde = serde; + } + else { + tblSerde = currPart.getTableDesc().getDeserializerClass().newInstance(); + tblSerde.initialize(job, currPart.getTableDesc().getProperties()); + } + + partTblObjectInspectorConverter = ObjectInspectorConverters.getConverterOutputOI( + serde.getObjectInspector(), + tblSerde.getObjectInspector(), true).getSecond(); + if (LOG.isDebugEnabled()) { LOG.debug("Creating fetchTask with deserializer typeinfo: " + serde.getObjectInspector().getTypeName()); @@ -503,14 +528,15 @@ vcValues = MapOperator.populateVirtualColumnValues(context, vcCols, vcValues, serde); row[isPartitioned ? 
2 : 1] = vcValues; } - row[0] = serde.deserialize(value); + row[0] = partTblObjectInspectorConverter.convert(serde.deserialize(value)); + if (hasVC || isPartitioned) { inspectable.o = row; inspectable.oi = rowObjectInspector; return inspectable; } inspectable.o = row[0]; - inspectable.oi = serde.getObjectInspector(); + inspectable.oi = tblSerde.getObjectInspector(); return inspectable; } else { currRecReader.close(); Index: ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java (revision 1426896) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java (working copy) @@ -30,6 +30,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.ObjectPair; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.persistence.RowContainer; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -40,7 +41,6 @@ import org.apache.hadoop.hive.ql.plan.OperatorDesc; import org.apache.hadoop.hive.ql.plan.SMBJoinDesc; import org.apache.hadoop.hive.ql.plan.api.OperatorType; -import org.apache.hadoop.hive.ql.util.ObjectPair; import org.apache.hadoop.hive.serde2.ColumnProjectionUtils; import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; Index: ql/src/java/org/apache/hadoop/hive/ql/util/ObjectPair.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/util/ObjectPair.java (revision 1426896) +++ ql/src/java/org/apache/hadoop/hive/ql/util/ObjectPair.java (working copy) @@ -1,47 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.util; - -public class ObjectPair { - private F first; - private S second; - - public ObjectPair() {} - - public ObjectPair(F first, S second) { - this.first = first; - this.second = second; - } - - public F getFirst() { - return first; - } - - public void setFirst(F first) { - this.first = first; - } - - public S getSecond() { - return second; - } - - public void setSecond(S second) { - this.second = second; - } -} Index: ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java (revision 1426896) +++ ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java (working copy) @@ -87,7 +87,7 @@ public PartitionDesc(final org.apache.hadoop.hive.ql.metadata.Partition part) throws HiveException { tableDesc = Utilities.getTableDesc(part.getTable()); - properties = part.getSchema(); + properties = part.getSchemaFromPartitionSchema(); partSpec = part.getSpec(); deserializerClass = part.getDeserializer(properties).getClass(); inputFileFormatClass = part.getInputFormatClass(); Index: ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java =================================================================== 
--- ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java (revision 1426896) +++ ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java (working copy) @@ -20,9 +20,9 @@ import java.io.Serializable; import java.util.Enumeration; -import java.util.Properties; import java.util.LinkedHashMap; import java.util.Map; +import java.util.Properties; import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils; import org.apache.hadoop.hive.ql.io.HiveOutputFormat; @@ -149,7 +149,7 @@ org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE) != null); } - + @Override public Object clone() { TableDesc ret = new TableDesc(); @@ -170,4 +170,42 @@ } return ret; } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((deserializerClass == null) ? 0 : deserializerClass.hashCode()); + result = prime * result + + ((inputFileFormatClass == null) ? 0 : inputFileFormatClass.hashCode()); + result = prime * result + + ((outputFileFormatClass == null) ? 0 : outputFileFormatClass.hashCode()); + result = prime * result + ((properties == null) ? 0 : properties.hashCode()); + result = prime * result + ((serdeClassName == null) ? 0 : serdeClassName.hashCode()); + result = prime * result + ((jobProperties == null) ? 0 : jobProperties.hashCode()); + return result; + } + + @Override + public boolean equals(Object o) { + if (!(o instanceof TableDesc)) { + return false; + } + + TableDesc target = (TableDesc) o; + boolean ret = true; + ret = ret && (deserializerClass == null ? target.deserializerClass == null : + deserializerClass.equals(target.deserializerClass)); + ret = ret && (inputFileFormatClass == null ? target.inputFileFormatClass == null : + inputFileFormatClass.equals(target.inputFileFormatClass)); + ret = ret && (outputFileFormatClass == null ? target.outputFileFormatClass == null : + outputFileFormatClass.equals(target.outputFileFormatClass)); + ret = ret && (properties == null ? 
target.properties == null : + properties.equals(target.properties)); + ret = ret && (serdeClassName == null ? target.serdeClassName == null : + serdeClassName.equals(target.serdeClassName)); + ret = ret && (jobProperties == null ? target.jobProperties == null : + jobProperties.equals(target.jobProperties)); + return ret; + } } Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (revision 1426896) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (working copy) @@ -41,6 +41,7 @@ import org.apache.hadoop.fs.PathFilter; import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.common.JavaUtils; +import org.apache.hadoop.hive.common.ObjectPair; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.TableType; @@ -165,7 +166,6 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDFHash; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr; import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF; -import org.apache.hadoop.hive.ql.util.ObjectPair; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.Deserializer; import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe; Index: ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java (revision 1426896) +++ ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java (working copy) @@ -226,22 +226,27 @@ private static class CombinePathInputFormat { private final List> opList; private final String inputFormatClassName; + private final String deserializerClassName; public CombinePathInputFormat(List> opList, - String 
inputFormatClassName) { + String inputFormatClassName, + String deserializerClassName) { this.opList = opList; this.inputFormatClassName = inputFormatClassName; + this.deserializerClassName = deserializerClassName; } @Override public boolean equals(Object o) { if (o instanceof CombinePathInputFormat) { - CombinePathInputFormat mObj = (CombinePathInputFormat)o; + CombinePathInputFormat mObj = (CombinePathInputFormat) o; if (mObj == null) { return false; } - return opList.equals(mObj.opList) && - inputFormatClassName.equals(mObj.inputFormatClassName); + return (opList.equals(mObj.opList)) && + (inputFormatClassName.equals(mObj.inputFormatClassName)) && + (deserializerClassName == null ? (mObj.deserializerClassName == null) : + deserializerClassName.equals(mObj.deserializerClassName)); } return false; } @@ -296,6 +301,8 @@ Class inputFormatClass = part.getInputFileFormatClass(); String inputFormatClassName = inputFormatClass.getName(); InputFormat inputFormat = getInputFormatFromCache(inputFormatClass, job); + String deserializerClassName = part.getDeserializerClass() == null ? 
null + : part.getDeserializerClass().getName(); // Since there is no easy way of knowing whether MAPREDUCE-1597 is present in the tree or not, // we use a configuration variable for the same @@ -342,12 +349,24 @@ // Does a pool exist for this path already CombineFilter f = null; List> opList = null; - boolean done = false; if (!mrwork.isMapperCannotSpanPartns()) { opList = HiveFileFormatUtils.doGetWorksFromPath( pathToAliases, aliasToWork, filterPath); - f = poolMap.get(new CombinePathInputFormat(opList, inputFormatClassName)); + CombinePathInputFormat combinePathInputFormat = + new CombinePathInputFormat(opList, inputFormatClassName, deserializerClassName); + f = poolMap.get(combinePathInputFormat); + if (f == null) { + f = new CombineFilter(filterPath); + LOG.info("CombineHiveInputSplit creating pool for " + path + + "; using filter path " + filterPath); + combine.createPool(job, f); + poolMap.put(combinePathInputFormat, f); + } else { + LOG.info("CombineHiveInputSplit: pool is already created for " + path + + "; using filter path " + filterPath); + f.addPath(filterPath); + } } else { // In the case of tablesample, the input paths are pointing to files rather than directories. 
// We need to get the parent directory as the filtering path so that all files in the same @@ -361,24 +380,7 @@ } else { inpDirs.add(path); } - done = true; } - - if (!done) { - if (f == null) { - f = new CombineFilter(filterPath); - LOG.info("CombineHiveInputSplit creating pool for " + path + - "; using filter path " + filterPath); - combine.createPool(job, f); - if (!mrwork.isMapperCannotSpanPartns()) { - poolMap.put(new CombinePathInputFormat(opList, inputFormatClassName), f); - } - } else { - LOG.info("CombineHiveInputSplit: pool is already created for " + path + - "; using filter path " + filterPath); - f.addPath(filterPath); - } - } } // Processing directories Index: ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java (revision 1426896) +++ ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java (working copy) @@ -405,7 +405,7 @@ } /** - * Get the list of operatators from the opeerator tree that are needed for the path + * Get the list of operators from the operator tree that are needed for the path * @param pathToAliases mapping from path to aliases * @param aliasToWork The operator tree to be invoked for a given alias * @param dir The path to look for