diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index 931533a..bd0d86f 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -3614,6 +3614,8 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal "internal use only. When false, don't suppress fatal exceptions like\n" + "NullPointerException, etc so the query will fail and assure it will be noticed", true), + HIVE_EXEC_ORC_USE_DECIMAL64_COLUMN_VECTORS("hive.exec.orc.use.decimal64.column.vectors", true, + "Whether ORC readers should make use of fast decimal64 column vectors when possible"), HIVE_TYPE_CHECK_ON_INSERT("hive.typecheck.on.insert", true, "This property has been extended to control " + "whether to check, convert, and normalize partition value to conform to its column type in " diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAcidOnTez.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAcidOnTez.java index 0af91bd..b828f4c 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAcidOnTez.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAcidOnTez.java @@ -663,8 +663,8 @@ public void testAcidInsertWithRemoveUnion() throws Exception { } String[][] expected2 = { - {"{\"writeid\":1,\"bucketid\":536870913,\"rowid\":1}\t1\t2", "warehouse/t/delta_0000001_0000001_0001/bucket_00000"}, - {"{\"writeid\":1,\"bucketid\":536870913,\"rowid\":0}\t3\t4", "warehouse/t/delta_0000001_0000001_0001/bucket_00000"}, + {"{\"writeid\":1,\"bucketid\":536870913,\"rowid\":0}\t1\t2", "warehouse/t/delta_0000001_0000001_0001/bucket_00000"}, + {"{\"writeid\":1,\"bucketid\":536870913,\"rowid\":1}\t3\t4", "warehouse/t/delta_0000001_0000001_0001/bucket_00000"}, {"{\"writeid\":1,\"bucketid\":536870914,\"rowid\":0}\t5\t6", "warehouse/t/delta_0000001_0000001_0002/bucket_00000"}, {"{\"writeid\":1,\"bucketid\":536870914,\"rowid\":1}\t7\t8", "warehouse/t/delta_0000001_0000001_0002/bucket_00000"}, {"{\"writeid\":1,\"bucketid\":536870915,\"rowid\":0}\t9\t10", "warehouse/t/delta_0000001_0000001_0003/bucket_00000"} diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties index d146f92..e393967 100644 --- a/itests/src/test/resources/testconfiguration.properties +++ b/itests/src/test/resources/testconfiguration.properties @@ -553,6 +553,9 @@ minillaplocal.query.files=\ llap_partitioned.q,\ llap_smb.q,\ llap_vector_nohybridgrace.q,\ + llap_uncompressed.q,\ + llap_decimal64_reader.q,\ + llap_text.q,\ load_data_acid_rename.q,\ load_data_using_job.q,\ load_dyn_part5.q,\ diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java index 6d29163..3c3f4a5 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hive.llap.io.api.impl; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport; import org.apache.hadoop.hive.ql.io.BatchToRowInputFormat; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; @@ -229,4 +230,9 @@ static TableScanOperator findTsOp(MapWork mapWork) throws HiveException { } return tableScanOperator; } + + @Override + public 
VectorizedSupport.Support[] getSupportedFeatures() { + return new VectorizedSupport.Support[] {VectorizedSupport.Support.DECIMAL_64}; + } } diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java index 7af1b05..32f3bed 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java @@ -31,13 +31,11 @@ import org.apache.hadoop.hive.llap.counters.QueryFragmentCounters; import org.apache.hadoop.hive.llap.io.api.impl.ColumnVectorBatch; import org.apache.hadoop.hive.llap.io.api.impl.LlapIoImpl; -import org.apache.hadoop.hive.llap.io.decode.ColumnVectorProducer.Includes; import org.apache.hadoop.hive.llap.io.encoded.SerDeEncodedDataReader; import org.apache.hadoop.hive.llap.io.metadata.ConsumerFileMetadata; import org.apache.hadoop.hive.llap.io.metadata.ConsumerStripeMetadata; import org.apache.hadoop.hive.llap.metrics.LlapDaemonCacheMetrics; import org.apache.hadoop.hive.llap.metrics.LlapDaemonIOMetrics; -import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx; import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat; import org.apache.hadoop.hive.ql.io.orc.encoded.Consumer; import org.apache.hadoop.hive.ql.io.orc.encoded.IoTrace; @@ -52,8 +50,8 @@ import org.apache.hadoop.mapred.Reporter; import org.apache.hive.common.util.FixedSizedObjectPool; import org.apache.orc.CompressionKind; +import org.apache.orc.OrcFile; import org.apache.orc.OrcProto; -import org.apache.orc.OrcUtils; import org.apache.orc.OrcProto.ColumnEncoding; import org.apache.orc.OrcProto.RowIndex; import org.apache.orc.OrcProto.RowIndexEntry; @@ -289,5 +287,10 @@ public CompressionKind getCompressionKind() { public TypeDescription getSchema() { return schema; } + + @Override + public OrcFile.Version getFileVersion() { + return null; + } } } diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java index feccb87..0d7435c 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java @@ -33,6 +33,7 @@ import org.apache.hadoop.hive.llap.metrics.LlapDaemonIOMetrics; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ListColumnVector; @@ -73,10 +74,11 @@ private IoTrace trace; private final Includes includes; private TypeDescription[] batchSchemas; + private boolean useDecimal64ColumnVectors; public OrcEncodedDataConsumer( - Consumer consumer, Includes includes, boolean skipCorrupt, - QueryFragmentCounters counters, LlapDaemonIOMetrics ioMetrics) { + Consumer consumer, Includes includes, boolean skipCorrupt, + QueryFragmentCounters counters, LlapDaemonIOMetrics ioMetrics) { super(consumer, includes.getPhysicalColumnIds().size(), ioMetrics); this.includes = includes; // TODO: get rid of this @@ -84,6 +86,10 @@ public OrcEncodedDataConsumer( this.counters = counters; } + public 
void setUseDecimal64ColumnVectors(final boolean useDecimal64ColumnVectors) { + this.useDecimal64ColumnVectors = useDecimal64ColumnVectors; + } + public void setFileMetadata(ConsumerFileMetadata f) { assert fileMetadata == null; fileMetadata = f; @@ -153,7 +159,7 @@ protected void decodeBatch(OrcEncodedColumnBatch batch, if (cvb.cols[idx] == null) { // Orc store rows inside a root struct (hive writes it this way). // When we populate column vectors we skip over the root struct. - cvb.cols[idx] = createColumn(batchSchemas[idx], VectorizedRowBatch.DEFAULT_SIZE); + cvb.cols[idx] = createColumn(batchSchemas[idx], VectorizedRowBatch.DEFAULT_SIZE, useDecimal64ColumnVectors); } trace.logTreeReaderNextVector(idx); @@ -217,10 +223,10 @@ private void createColumnReaders(OrcEncodedColumnBatch batch, TreeReaderFactory.Context context = new TreeReaderFactory.ReaderContext() .setSchemaEvolution(evolution).skipCorrupt(skipCorrupt) .writerTimeZone(stripeMetadata.getWriterTimezone()) - ; + .fileFormat(fileMetadata == null ? null : fileMetadata.getFileVersion()); this.batchSchemas = includes.getBatchReaderTypes(fileSchema); StructTreeReader treeReader = EncodedTreeReaderFactory.createRootTreeReader( - batchSchemas, stripeMetadata.getEncodings(), batch, codec, context); + batchSchemas, stripeMetadata.getEncodings(), batch, codec, context, useDecimal64ColumnVectors); this.columnReaders = treeReader.getChildReaders(); if (LlapIoImpl.LOG.isDebugEnabled()) { @@ -232,7 +238,7 @@ private void createColumnReaders(OrcEncodedColumnBatch batch, positionInStreams(columnReaders, batch.getBatchKey(), stripeMetadata); } - private ColumnVector createColumn(TypeDescription type, int batchSize) { + private ColumnVector createColumn(TypeDescription type, int batchSize, final boolean useDecimal64ColumnVectors) { switch (type.getCategory()) { case BOOLEAN: case BYTE: @@ -252,30 +258,34 @@ private ColumnVector createColumn(TypeDescription type, int batchSize) { case TIMESTAMP: return new TimestampColumnVector(batchSize); case DECIMAL: - return new DecimalColumnVector(batchSize, type.getPrecision(), - type.getScale()); + if (useDecimal64ColumnVectors && type.getPrecision() <= TypeDescription.MAX_DECIMAL64_PRECISION) { + return new Decimal64ColumnVector(batchSize, type.getPrecision(), type.getScale()); + } else { + return new DecimalColumnVector(batchSize, type.getPrecision(), type.getScale()); + } case STRUCT: { List subtypeIdxs = type.getChildren(); ColumnVector[] fieldVector = new ColumnVector[subtypeIdxs.size()]; - for(int i = 0; i < fieldVector.length; ++i) { - fieldVector[i] = createColumn(subtypeIdxs.get(i), batchSize); + for (int i = 0; i < fieldVector.length; ++i) { + fieldVector[i] = createColumn(subtypeIdxs.get(i), batchSize, useDecimal64ColumnVectors); } return new StructColumnVector(batchSize, fieldVector); } case UNION: { List subtypeIdxs = type.getChildren(); ColumnVector[] fieldVector = new ColumnVector[subtypeIdxs.size()]; - for(int i=0; i < fieldVector.length; ++i) { - fieldVector[i] = createColumn(subtypeIdxs.get(i), batchSize); + for (int i = 0; i < fieldVector.length; ++i) { + fieldVector[i] = createColumn(subtypeIdxs.get(i), batchSize, useDecimal64ColumnVectors); } return new UnionColumnVector(batchSize, fieldVector); } case LIST: - return new ListColumnVector(batchSize, createColumn(type.getChildren().get(0), batchSize)); + return new ListColumnVector(batchSize, createColumn(type.getChildren().get(0), batchSize, + useDecimal64ColumnVectors)); case MAP: List subtypeIdxs = type.getChildren(); - return new 
MapColumnVector(batchSize, createColumn(subtypeIdxs.get(0), batchSize), - createColumn(subtypeIdxs.get(1), batchSize)); + return new MapColumnVector(batchSize, createColumn(subtypeIdxs.get(0), batchSize, useDecimal64ColumnVectors), + createColumn(subtypeIdxs.get(1), batchSize, useDecimal64ColumnVectors)); default: throw new IllegalArgumentException("LLAP does not support " + type.getCategory()); } diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java index 4033b37..f06ec67 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java @@ -24,37 +24,17 @@ import java.util.Arrays; import java.util.List; -import org.apache.hadoop.hive.llap.counters.LlapIOCounters; -import org.apache.orc.CompressionCodec; -import org.apache.orc.OrcProto.BloomFilterIndex; -import org.apache.orc.OrcProto.FileTail; -import org.apache.orc.OrcProto.RowIndex; -import org.apache.orc.OrcProto.Stream; -import org.apache.orc.OrcProto.StripeStatistics; -import org.apache.orc.TypeDescription; -import org.apache.orc.impl.BufferChunk; -import org.apache.orc.impl.DataReaderProperties; -import org.apache.orc.impl.InStream; -import org.apache.orc.impl.OrcCodecPool; -import org.apache.orc.impl.OrcIndex; -import org.apache.orc.impl.OrcTail; -import org.apache.orc.impl.ReaderImpl; -import org.apache.orc.impl.SchemaEvolution; -import org.apache.orc.impl.WriterImpl; -import org.apache.tez.common.counters.TezCounters; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.Pool; import org.apache.hadoop.hive.common.Pool.PoolObjectHelper; +import org.apache.hadoop.hive.common.io.Allocator; import org.apache.hadoop.hive.common.io.Allocator.BufferObjectFactory; import org.apache.hadoop.hive.common.io.DataCache; -import org.apache.hadoop.hive.common.io.Allocator; -import org.apache.hadoop.hive.common.io.encoded.EncodedColumnBatch.ColumnStreamData; import org.apache.hadoop.hive.common.io.DiskRange; import org.apache.hadoop.hive.common.io.DiskRangeList; +import org.apache.hadoop.hive.common.io.encoded.EncodedColumnBatch.ColumnStreamData; import org.apache.hadoop.hive.common.io.encoded.MemoryBuffer; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; @@ -65,40 +45,58 @@ import org.apache.hadoop.hive.llap.cache.LlapDataBuffer; import org.apache.hadoop.hive.llap.cache.LowLevelCache; import org.apache.hadoop.hive.llap.cache.LowLevelCache.Priority; +import org.apache.hadoop.hive.llap.counters.LlapIOCounters; import org.apache.hadoop.hive.llap.counters.QueryFragmentCounters; import org.apache.hadoop.hive.llap.io.api.impl.LlapIoImpl; import org.apache.hadoop.hive.llap.io.decode.ColumnVectorProducer.Includes; import org.apache.hadoop.hive.llap.io.decode.ColumnVectorProducer.SchemaEvolutionFactory; import org.apache.hadoop.hive.llap.io.decode.OrcEncodedDataConsumer; -import org.apache.hadoop.hive.llap.io.metadata.OrcFileMetadata; import org.apache.hadoop.hive.llap.io.metadata.MetadataCache; import org.apache.hadoop.hive.llap.io.metadata.MetadataCache.LlapBufferOrBuffers; +import org.apache.hadoop.hive.llap.io.metadata.OrcFileMetadata; import 
org.apache.hadoop.hive.llap.io.metadata.OrcStripeMetadata; -import org.apache.hadoop.hive.ql.exec.DDLTask; -import org.apache.hadoop.hive.ql.io.AcidUtils; import org.apache.hadoop.hive.ql.io.HdfsUtils; -import org.apache.orc.CompressionKind; -import org.apache.orc.DataReader; import org.apache.hadoop.hive.ql.io.orc.OrcFile; import org.apache.hadoop.hive.ql.io.orc.OrcFile.ReaderOptions; -import org.apache.orc.OrcConf; import org.apache.hadoop.hive.ql.io.orc.OrcSplit; -import org.apache.hadoop.hive.ql.io.orc.encoded.Reader; import org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl; import org.apache.hadoop.hive.ql.io.orc.encoded.EncodedOrcFile; import org.apache.hadoop.hive.ql.io.orc.encoded.EncodedReader; import org.apache.hadoop.hive.ql.io.orc.encoded.IoTrace; import org.apache.hadoop.hive.ql.io.orc.encoded.OrcBatchKey; +import org.apache.hadoop.hive.ql.io.orc.encoded.Reader; import org.apache.hadoop.hive.ql.io.orc.encoded.Reader.OrcEncodedColumnBatch; import org.apache.hadoop.hive.ql.io.orc.encoded.Reader.PoolFactory; -import org.apache.orc.impl.RecordReaderUtils; -import org.apache.orc.StripeInformation; import org.apache.hadoop.hive.ql.io.sarg.SearchArgument; import org.apache.hadoop.mapred.FileSplit; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hive.common.util.FixedSizedObjectPool; +import org.apache.orc.CompressionCodec; +import org.apache.orc.CompressionKind; +import org.apache.orc.DataReader; +import org.apache.orc.OrcConf; import org.apache.orc.OrcProto; +import org.apache.orc.OrcProto.BloomFilterIndex; +import org.apache.orc.OrcProto.FileTail; +import org.apache.orc.OrcProto.RowIndex; +import org.apache.orc.OrcProto.Stream; +import org.apache.orc.OrcProto.StripeStatistics; +import org.apache.orc.StripeInformation; +import org.apache.orc.TypeDescription; +import org.apache.orc.impl.BufferChunk; +import org.apache.orc.impl.DataReaderProperties; +import org.apache.orc.impl.InStream; +import org.apache.orc.impl.OrcCodecPool; +import org.apache.orc.impl.OrcIndex; +import org.apache.orc.impl.OrcTail; +import org.apache.orc.impl.ReaderImpl; +import org.apache.orc.impl.RecordReaderUtils; +import org.apache.orc.impl.SchemaEvolution; +import org.apache.orc.impl.WriterImpl; import org.apache.tez.common.CallableWithNdc; +import org.apache.tez.common.counters.TezCounters; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.common.collect.Lists; @@ -233,6 +231,8 @@ public OrcEncodedDataReader(LowLevelCache lowLevelCache, BufferUsageManager buff this.jobConf = jobConf; // TODO: setFileMetadata could just create schema. Called in two places; clean up later. this.evolution = sef.createSchemaEvolution(fileMetadata.getSchema()); + consumer.setUseDecimal64ColumnVectors(HiveConf.getBoolVar(jobConf, + ConfVars.HIVE_EXEC_ORC_USE_DECIMAL64_COLUMN_VECTORS)); consumer.setFileMetadata(fileMetadata); consumer.setSchemaEvolution(evolution); } @@ -571,7 +571,8 @@ private OrcFileMetadata getFileFooterFromCacheOrDisk() throws IOException { stripes.add(new ReaderImpl.StripeInformationImpl(stripeProto)); } return new OrcFileMetadata( - fileKey, tail.getFooter(), tail.getPostscript(), stats, stripes); + fileKey, tail.getFooter(), tail.getPostscript(), stats, stripes, + ReaderImpl.getFileVersion(tail.getPostscript().getVersionList())); } finally { // We don't need the buffer anymore. 
metadataCache.decRefBuffer(tailBuffers); @@ -588,7 +589,7 @@ private OrcFileMetadata getFileFooterFromCacheOrDisk() throws IOException { } FileTail ft = orcReader.getFileTail(); return new OrcFileMetadata(fileKey, ft.getFooter(), ft.getPostscript(), - orcReader.getOrcProtoStripeStatistics(), orcReader.getStripes()); + orcReader.getOrcProtoStripeStatistics(), orcReader.getStripes(), orcReader.getFileVersion()); } private OrcProto.StripeFooter buildStripeFooter( @@ -766,7 +767,8 @@ private boolean determineRgsToRead(int rowIndexStride, if (sarg != null && rowIndexStride != 0) { sargApp = new RecordReaderImpl.SargApplier(sarg, rowIndexStride, evolution, - OrcFile.WriterVersion.from(fileMetadata.getWriterVersionNum())); + OrcFile.WriterVersion.from(OrcFile.WriterImplementation.ORC_JAVA, fileMetadata.getWriterVersionNum()), + false); } boolean hasAnyData = false; // stripeRgs should have been initialized by this time with an empty array. diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/SerDeEncodedDataReader.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/SerDeEncodedDataReader.java index 1cfe929..c35de0e 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/SerDeEncodedDataReader.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/SerDeEncodedDataReader.java @@ -221,6 +221,9 @@ public MemoryBuffer create() { this.sourceSerDe = sourceSerDe; this.reporter = reporter; this.jobConf = jobConf; + final boolean useDecimal64ColumnVectors = HiveConf.getBoolVar(jobConf, ConfVars + .HIVE_EXEC_ORC_USE_DECIMAL64_COLUMN_VECTORS); + consumer.setUseDecimal64ColumnVectors(useDecimal64ColumnVectors); this.schema = schema; this.writerIncludes = OrcInputFormat.genIncludedColumns(schema, columnIds); SchemaEvolution evolution = new SchemaEvolution(schema, null, @@ -563,6 +566,23 @@ public void setCurrentStripeOffsets(long currentKnownTornStart, public CompressionCodec getCompressionCodec() { return null; } + + @Override + public long getFileBytes(int column) { + long size = 0L; + List l = this.colStreams.get(column); + if (l == null) { + return size; + } + for (CacheOutputReceiver c : l) { + if (c.getData() != null && !c.suppressed && c.getName().getArea() != StreamName.Area.INDEX) { + for (MemoryBuffer buffer : c.getData()) { + size += buffer.getByteBufferRaw().limit(); + } + } + } + return size; + } } private interface CacheOutput { diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/VectorDeserializeOrcWriter.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/VectorDeserializeOrcWriter.java index de19b1d..f1e51e8 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/VectorDeserializeOrcWriter.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/VectorDeserializeOrcWriter.java @@ -20,14 +20,19 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Properties; +import java.util.Set; import java.util.concurrent.ConcurrentLinkedQueue; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.llap.DebugUtils; @@ -35,10 +40,12 @@ import 
org.apache.hadoop.hive.llap.io.encoded.SerDeEncodedDataReader.CacheWriter; import org.apache.hadoop.hive.llap.io.encoded.SerDeEncodedDataReader.DeserializerOrcWriter; import org.apache.hadoop.hive.llap.io.encoded.SerDeEncodedDataReader.EncodingWriter; +import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorDeserializeRow; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport; import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils; import org.apache.hadoop.hive.ql.io.orc.Writer; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -46,14 +53,20 @@ import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.Deserializer; import org.apache.hadoop.hive.serde2.SerDeException; +import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters; import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe; import org.apache.hadoop.hive.serde2.lazy.fast.LazySimpleDeserializeRead; import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; import org.apache.hadoop.hive.serde2.objectinspector.StructField; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; +import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.io.BinaryComparable; import org.apache.hadoop.io.Writable; import org.apache.hadoop.mapred.InputFormat; @@ -121,12 +134,12 @@ private VectorDeserializeOrcWriter(Configuration conf, Properties tblProps, int allocSize) throws IOException { super(sourceOi, allocSize); // See also: the usage of VectorDeserializeType, for binary. For now, we only want text. 
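+ // Table properties are threaded into createVrbCtx so that, for LazySimpleSerDe text
+ // tables, DECIMAL columns whose precision fits in a long (<= 18 digits) can be marked
+ // with DataTypePhysicalVariation.DECIMAL_64 before the source batch is created.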
- this.vrbCtx = createVrbCtx(sourceOi); + this.vrbCtx = createVrbCtx(sourceOi, tblProps); this.sourceIncludes = sourceIncludes; this.cacheIncludes = cacheIncludes; this.sourceBatch = vrbCtx.createVectorizedRowBatch(); deserializeRead = new LazySimpleDeserializeRead(vrbCtx.getRowColumnTypeInfos(), - /* useExternalBuffer */ true, createSerdeParams(conf, tblProps)); + vrbCtx.getRowdataTypePhysicalVariations(),/* useExternalBuffer */ true, createSerdeParams(conf, tblProps)); vectorDeserializeRow = new VectorDeserializeRow(deserializeRead); int colCount = vrbCtx.getRowColumnTypeInfos().length; boolean[] includes = null; @@ -192,13 +205,40 @@ public void startAsync(AsyncCallback callback) { this.orcThread.start(); } - private static VectorizedRowBatchCtx createVrbCtx(StructObjectInspector oi) throws IOException { + private static VectorizedRowBatchCtx createVrbCtx(StructObjectInspector oi, final Properties tblProps) throws IOException { + final String serde = tblProps.getProperty(serdeConstants.SERIALIZATION_LIB); + final String inputFormat = tblProps.getProperty(hive_metastoreConstants.FILE_INPUT_FORMAT); + final boolean isTextFormat = inputFormat != null && inputFormat.equals(TextInputFormat.class.getName()) && + serde != null && serde.equals(LazySimpleSerDe.class.getName()); + List dataTypePhysicalVariations = new ArrayList<>(); + if (isTextFormat) { + Set supportSet = new HashSet<>(Arrays.asList(VectorizedSupport.Support.values())); + StructTypeInfo structTypeInfo = (StructTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(oi); + int dataColumnCount = structTypeInfo.getAllStructFieldTypeInfos().size(); + final boolean isSupportDecimal64 = supportSet.contains(VectorizedSupport.Support.DECIMAL_64); + for (int i = 0; i < dataColumnCount; i++) { + DataTypePhysicalVariation dataTypePhysicalVariation = DataTypePhysicalVariation.NONE; + if (isSupportDecimal64) { + TypeInfo typeInfo = structTypeInfo.getAllStructFieldTypeInfos().get(i); + if (typeInfo instanceof DecimalTypeInfo) { + DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo; + if (HiveDecimalWritable.isPrecisionDecimal64(decimalTypeInfo.precision())) { + dataTypePhysicalVariation = DataTypePhysicalVariation.DECIMAL_64; + } + } + } + dataTypePhysicalVariations.add(dataTypePhysicalVariation); + } + } VectorizedRowBatchCtx vrbCtx = new VectorizedRowBatchCtx(); try { vrbCtx.init(oi, new String[0]); } catch (HiveException e) { throw new IOException(e); } + if (!dataTypePhysicalVariations.isEmpty()) { + vrbCtx.setRowDataTypePhysicalVariations(dataTypePhysicalVariations.toArray(new DataTypePhysicalVariation[0])); + } return vrbCtx; } diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerFileMetadata.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerFileMetadata.java index bf139c0..d6b16ef 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerFileMetadata.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerFileMetadata.java @@ -20,6 +20,8 @@ import java.util.List; import org.apache.orc.CompressionKind; +import org.apache.orc.FileFormatException; +import org.apache.orc.OrcFile; import org.apache.orc.OrcProto.Type; import org.apache.orc.TypeDescription; @@ -27,5 +29,6 @@ int getStripeCount(); CompressionKind getCompressionKind(); List getTypes(); - TypeDescription getSchema(); + TypeDescription getSchema() throws FileFormatException; + OrcFile.Version getFileVersion(); } diff --git 
a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java index 0012afb..5eb713c 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java @@ -22,7 +22,9 @@ import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat; import org.apache.orc.CompressionKind; +import org.apache.orc.FileFormatException; import org.apache.orc.FileMetadata; +import org.apache.orc.OrcFile; import org.apache.orc.OrcProto; import org.apache.orc.OrcProto.StripeStatistics; import org.apache.orc.OrcUtils; @@ -48,9 +50,10 @@ private final long contentLength; private final long numberOfRows; private final boolean isOriginalFormat; + private final OrcFile.Version fileVersion; public OrcFileMetadata(Object fileKey, OrcProto.Footer footer, OrcProto.PostScript ps, - List stats, List stripes) { + List stats, List stripes, final OrcFile.Version fileVersion) { this.stripeStats = stats; this.compressionKind = CompressionKind.valueOf(ps.getCompression().name()); this.compressionBufferSize = (int)ps.getCompressionBlockSize(); @@ -65,6 +68,7 @@ public OrcFileMetadata(Object fileKey, OrcProto.Footer footer, OrcProto.PostScri this.numberOfRows = footer.getNumberOfRows(); this.fileStats = footer.getStatisticsList(); this.fileKey = fileKey; + this.fileVersion = fileVersion; } // FileMetadata @@ -124,6 +128,11 @@ public int getMetadataSize() { } @Override + public int getWriterImplementation() { + return OrcFile.WriterImplementation.ORC_JAVA.getId(); + } + + @Override public int getWriterVersionNum() { return writerVersionNum; } @@ -153,7 +162,12 @@ public int getStripeCount() { return stripes.size(); } - public TypeDescription getSchema() { + public TypeDescription getSchema() throws FileFormatException { return OrcUtils.convertTypeFromProtobuf(this.types, 0); } + + @Override + public OrcFile.Version getFileVersion() { + return fileVersion; + } } diff --git a/pom.xml b/pom.xml index 1f43c41..30e32cc 100644 --- a/pom.xml +++ b/pom.xml @@ -184,7 +184,7 @@ 0.9.3 2.10.0 2.3 - 1.4.3 + 1.5.0 1.10.19 2.0.0-M5 4.1.17.Final diff --git a/ql/pom.xml b/ql/pom.xml index 06124f7..d52c307 100644 --- a/ql/pom.xml +++ b/ql/pom.xml @@ -943,6 +943,7 @@ org.apache.hive:hive-spark-client org.apache.hive:hive-storage-api org.apache.orc:orc-core + org.apache.orc:orc-shims org.apache.orc:orc-tools joda-time:joda-time diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java index 2246901..183fae5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java @@ -214,8 +214,8 @@ public void setWork(FetchWork work) { private static final Map inputFormats = new HashMap(); @SuppressWarnings("unchecked") - static InputFormat getInputFormatFromCache( - Class inputFormatClass, JobConf conf) throws IOException { + public static InputFormat getInputFormatFromCache( + Class inputFormatClass, Configuration conf) throws IOException { if (Configurable.class.isAssignableFrom(inputFormatClass) || JobConfigurable.class.isAssignableFrom(inputFormatClass)) { return ReflectionUtil.newInstance(inputFormatClass, conf); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedInputFormatInterface.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedInputFormatInterface.java index e74b185..8ee59e4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedInputFormatInterface.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedInputFormatInterface.java @@ -24,4 +24,5 @@ */ public interface VectorizedInputFormatInterface { + VectorizedSupport.Support[] getSupportedFeatures(); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java index 6588385..ffbfb6f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java @@ -163,6 +163,11 @@ public VectorizedRowBatchCtx( return rowDataTypePhysicalVariations; } + public void setRowDataTypePhysicalVariations( + final DataTypePhysicalVariation[] rowDataTypePhysicalVariations) { + this.rowDataTypePhysicalVariations = rowDataTypePhysicalVariations; + } + public int[] getDataColumnNums() { return dataColumnNums; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java index e632d43..6434414 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java @@ -29,6 +29,7 @@ import java.io.IOException; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.io.NullWritable; @@ -50,6 +51,11 @@ static final int MAX_ROW = 100; // to prevent infinite loop static final Logger LOG = LoggerFactory.getLogger(NullRowsRecordReader.class.getName()); + @Override + public VectorizedSupport.Support[] getSupportedFeatures() { + return null; + } + public static class DummyInputSplit extends FileSplit { @SuppressWarnings("unused") // Serialization ctor. 
private DummyInputSplit() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java index f461364..9551a2a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java @@ -29,7 +29,6 @@ import java.util.Arrays; import java.util.Collections; import java.util.HashMap; -import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.NavigableMap; @@ -61,6 +60,7 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizedInputFormatInterface; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport; import org.apache.hadoop.hive.ql.io.AcidInputFormat; import org.apache.hadoop.hive.ql.io.AcidOutputFormat; import org.apache.hadoop.hive.ql.io.AcidUtils; @@ -116,6 +116,7 @@ import org.apache.hadoop.util.StringUtils; import org.apache.hive.common.util.Ref; import org.apache.orc.ColumnStatistics; +import org.apache.orc.FileFormatException; import org.apache.orc.OrcProto; import org.apache.orc.OrcProto.Footer; import org.apache.orc.OrcProto.Type; @@ -161,6 +162,11 @@ SelfDescribingInputFormatInterface, AcidInputFormat, CombineHiveInputFormat.AvoidSplitCombination, BatchToRowInputFormat { + @Override + public VectorizedSupport.Support[] getSupportedFeatures() { + return new VectorizedSupport.Support[] {VectorizedSupport.Support.DECIMAL_64}; + } + static enum SplitStrategyKind { HYBRID, BI, @@ -328,7 +334,7 @@ public static RecordReader createReaderFromFile(Reader file, List types = OrcUtils.getOrcTypes(schema); options.include(genIncludedColumns(schema, conf)); setSearchArgument(options, types, conf, isOriginal); - return file.rowsOptions(options); + return file.rowsOptions(options, conf); } public static boolean isOriginal(Reader file) { @@ -2167,7 +2173,7 @@ static Reader createOrcReaderForSplit(Configuration conf, OrcSplit orcSplit) thr public static boolean[] pickStripesViaTranslatedSarg(SearchArgument sarg, OrcFile.WriterVersion writerVersion, List types, - List stripeStats, int stripeCount) { + List stripeStats, int stripeCount) throws FileFormatException { LOG.info("Translated ORC pushdown predicate: " + sarg); assert sarg != null; if (stripeStats == null || writerVersion == OrcFile.WriterVersion.ORIGINAL) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java index 8c7c72e..d81921c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java @@ -23,6 +23,7 @@ import java.util.TreeMap; import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; import org.apache.hadoop.hive.ql.exec.AbstractFileMergeOperator; import org.apache.hadoop.hive.ql.exec.Utilities; @@ -231,16 +232,17 @@ public String toString() { * @param maxKey only return keys less than or equal to maxKey if it is * non-null * @param options options to provide to read the rows. 
+ * @param conf * @throws IOException */ @VisibleForTesting ReaderPairAcid(ReaderKey key, Reader reader, - RecordIdentifier minKey, RecordIdentifier maxKey, - ReaderImpl.Options options) throws IOException { + RecordIdentifier minKey, RecordIdentifier maxKey, + ReaderImpl.Options options, final Configuration conf) throws IOException { this.reader = reader; this.key = key; // TODO use stripe statistics to jump over stripes - recordReader = reader.rowsOptions(options); + recordReader = reader.rowsOptions(options, conf); this.minKey = minKey; this.maxKey = maxKey; // advance the reader until we reach the minimum key @@ -437,7 +439,7 @@ static int encodeBucketId(Configuration conf, int bucketId, int statementId) { RecordIdentifier newMinKey = minKey; RecordIdentifier newMaxKey = maxKey; - recordReader = reader.rowsOptions(options); + recordReader = reader.rowsOptions(options, conf); /** * Logically each bucket consists of 0000_0, 0000_0_copy_1... 0000_0_copy_N. etc We don't * know N a priori so if this is true, then the current split is from 0000_0_copy_N file. @@ -586,7 +588,7 @@ public void next(OrcStruct next) throws IOException { throw new IllegalStateException("No 'original' files found for bucketId=" + this.bucketId + " in " + mergerOptions.getRootPath()); } - recordReader = getReader().rowsOptions(options); + recordReader = getReader().rowsOptions(options, conf); next(nextRecord());//load 1st row } @Override public RecordReader getRecordReader() { @@ -620,7 +622,7 @@ public void next(OrcStruct next) throws IOException { nextRecord = null; return; } - recordReader = reader.rowsOptions(options); + recordReader = reader.rowsOptions(options, conf); } } } @@ -1040,7 +1042,7 @@ public Options clone() { //required (on Tez) that base_x/ doesn't have a file for 'bucket' reader = OrcFile.createReader(bucketPath, OrcFile.readerOptions(conf)); pair = new ReaderPairAcid(key, reader, keyInterval.getMinKey(), keyInterval.getMaxKey(), - eventOptions); + eventOptions, conf); } else { pair = new EmptyReaderPair(); @@ -1050,7 +1052,7 @@ public Options clone() { else { assert reader != null : "no reader? 
" + mergerOptions.getRootPath(); pair = new ReaderPairAcid(key, reader, keyInterval.getMinKey(), keyInterval.getMaxKey(), - eventOptions); + eventOptions, conf); } } minKey = pair.getMinKey(); @@ -1107,7 +1109,7 @@ public Options clone() { //HIVE-17320: we should compute a SARG to push down min/max key to delete_delta Reader deltaReader = OrcFile.createReader(deltaFile, OrcFile.readerOptions(conf)); ReaderPair deltaPair = new ReaderPairAcid(key, deltaReader, minKey, maxKey, - deltaEventOptions); + deltaEventOptions, conf); if (deltaPair.nextRecord() != null) { readers.put(key, deltaPair); } @@ -1121,7 +1123,7 @@ public Options clone() { assert length >= 0; Reader deltaReader = OrcFile.createReader(deltaFile, OrcFile.readerOptions(conf).maxLength(length)); //must get statementId from file name since Acid 1.0 doesn't write it into bucketProperty - ReaderPairAcid deltaPair = new ReaderPairAcid(key, deltaReader, minKey, maxKey, deltaEventOptions); + ReaderPairAcid deltaPair = new ReaderPairAcid(key, deltaReader, minKey, maxKey, deltaEventOptions, conf); if (deltaPair.nextRecord() != null) { readers.put(key, deltaPair); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/Reader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/Reader.java index 7485e60..8fd9b90 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/Reader.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/Reader.java @@ -20,6 +20,7 @@ import java.io.IOException; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.io.sarg.SearchArgument; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; @@ -55,7 +56,16 @@ * @throws IOException */ RecordReader rowsOptions(Options options) throws IOException; - + + /** + * Create a RecordReader that reads everything with the given options. + * @param options the options to use + * @param conf conf object + * @return a new RecordReader + * @throws IOException + */ + RecordReader rowsOptions(Options options, Configuration conf) throws IOException; + /** * Create a RecordReader that will scan the entire file. * This is a legacy method and rowsOptions is preferred. 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java index 1a6db1f..171b02b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java @@ -21,6 +21,7 @@ import java.io.IOException; import java.nio.ByteBuffer; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.io.sarg.SearchArgument; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; @@ -73,11 +74,17 @@ public RecordReader rows() throws IOException { @Override public RecordReader rowsOptions(Options options) throws IOException { + return rowsOptions(options, null); + } + + @Override + public RecordReader rowsOptions(Options options, Configuration conf) throws IOException { LOG.info("Reading ORC rows from " + path + " with " + options); - return new RecordReaderImpl(this, options); + return new RecordReaderImpl(this, options, conf); } + @Override public RecordReader rows(boolean[] include) throws IOException { return rowsOptions(new Options().include(include)); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java index 5b001a0..e068068 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java @@ -23,8 +23,11 @@ import java.util.List; import java.util.Map; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ListColumnVector; @@ -48,6 +51,7 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; +import org.apache.orc.OrcFile; import org.apache.orc.TypeDescription; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -60,9 +64,15 @@ private long baseRow; protected RecordReaderImpl(ReaderImpl fileReader, - Reader.Options options) throws IOException { + Reader.Options options, final Configuration conf) throws IOException { super(fileReader, options); - batch = this.schema.createRowBatch(); + final boolean useDecimal64ColumnVectors = conf != null && HiveConf.getBoolVar(conf, + HiveConf.ConfVars.HIVE_EXEC_ORC_USE_DECIMAL64_COLUMN_VECTORS); + if (useDecimal64ColumnVectors){ + batch = this.schema.createRowBatchV2(); + } else { + batch = this.schema.createRowBatch(); + } rowInBatch = 0; } @@ -80,8 +90,8 @@ boolean ensureBatch() throws IOException { return true; } - public VectorizedRowBatch createRowBatch() { - return this.schema.createRowBatch(); + public VectorizedRowBatch createRowBatch(boolean useDecimal64) { + return useDecimal64 ? 
this.schema.createRowBatchV2() : this.schema.createRowBatch(); } @Override @@ -393,7 +403,12 @@ static HiveDecimalWritable nextDecimal(ColumnVector vector, } else { result = (HiveDecimalWritable) previous; } - result.set(((DecimalColumnVector) vector).vector[row]); + if (vector instanceof Decimal64ColumnVector) { + long value = ((Decimal64ColumnVector) vector).vector[row]; + result.deserialize64(value, ((Decimal64ColumnVector) vector).scale); + } else { + result.set(((DecimalColumnVector) vector).vector[row]); + } return result; } else { return null; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcAcidRowBatchReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcAcidRowBatchReader.java index d2e1a68..8f5ecf0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcAcidRowBatchReader.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcAcidRowBatchReader.java @@ -110,7 +110,7 @@ final Reader reader = OrcInputFormat.createOrcReaderForSplit(conf, (OrcSplit) inputSplit); // Careful with the range here now, we do not want to read the whole base file like deltas. - innerReader = reader.rowsOptions(readerOptions.range(offset, length)); + innerReader = reader.rowsOptions(readerOptions.range(offset, length), conf); baseReader = new org.apache.hadoop.mapred.RecordReader() { @Override @@ -143,7 +143,13 @@ public float getProgress() throws IOException { return innerReader.getProgress(); } }; - this.vectorizedRowBatchBase = ((RecordReaderImpl) innerReader).createRowBatch(); + final boolean useDecimal64ColumnVectors = HiveConf + .getBoolVar(conf, ConfVars.HIVE_EXEC_ORC_USE_DECIMAL64_COLUMN_VECTORS); + if (useDecimal64ColumnVectors) { + this.vectorizedRowBatchBase = ((RecordReaderImpl) innerReader).createRowBatch(true); + } else { + this.vectorizedRowBatchBase = ((RecordReaderImpl) innerReader).createRowBatch(false); + } } /** @@ -859,10 +865,16 @@ public String toString() { private final boolean isBucketedTable; DeleteReaderValue(Reader deleteDeltaReader, Reader.Options readerOptions, int bucket, - ValidWriteIdList validWriteIdList, boolean isBucketedTable) throws IOException { - this.recordReader = deleteDeltaReader.rowsOptions(readerOptions); + ValidWriteIdList validWriteIdList, boolean isBucketedTable, final JobConf conf) throws IOException { + this.recordReader = deleteDeltaReader.rowsOptions(readerOptions, conf); this.bucketForSplit = bucket; - this.batch = deleteDeltaReader.getSchema().createRowBatch(); + final boolean useDecimal64ColumnVector = HiveConf.getBoolVar(conf, ConfVars + .HIVE_EXEC_ORC_USE_DECIMAL64_COLUMN_VECTORS); + if (useDecimal64ColumnVector) { + this.batch = deleteDeltaReader.getSchema().createRowBatchV2(); + } else { + this.batch = deleteDeltaReader.getSchema().createRowBatch(); + } if (!recordReader.nextBatch(batch)) { // Read the first batch. this.batch = null; // Oh! the first batch itself was null. Close the reader. 
} @@ -1054,7 +1066,7 @@ public int compareTo(CompressedOwid other) { throw new DeleteEventsOverflowMemoryException(); } DeleteReaderValue deleteReaderValue = new DeleteReaderValue(deleteDeltaReader, - readerOptions, bucket, validWriteIdList, isBucketedTable); + readerOptions, bucket, validWriteIdList, isBucketedTable, conf); DeleteRecordKey deleteRecordKey = new DeleteRecordKey(); if (deleteReaderValue.next(deleteRecordKey)) { sortMerger.put(deleteRecordKey, deleteReaderValue); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcInputFormat.java index c581bba..892fcc0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcInputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcInputFormat.java @@ -31,6 +31,7 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizedInputFormatInterface; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport; import org.apache.hadoop.hive.ql.io.AcidUtils; import org.apache.hadoop.hive.ql.io.InputFormatChecker; import org.apache.hadoop.hive.ql.io.SelfDescribingInputFormatInterface; @@ -99,7 +100,7 @@ options.include(OrcInputFormat.genIncludedColumns(schema, conf)); OrcInputFormat.setSearchArgument(options, types, conf, true); - this.reader = file.rowsOptions(options); + this.reader = file.rowsOptions(options, conf); int partitionColumnCount = rbCtx.getPartitionColumnCount(); if (partitionColumnCount > 0) { @@ -204,4 +205,9 @@ public boolean validateInput(FileSystem fs, HiveConf conf, } return true; } + + @Override + public VectorizedSupport.Support[] getSupportedFeatures() { + return new VectorizedSupport.Support[] {VectorizedSupport.Support.DECIMAL_64}; + } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java index 71682af..b0102cc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java @@ -24,6 +24,8 @@ import java.util.Map; import java.util.Set; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -63,6 +65,7 @@ import com.google.common.annotations.VisibleForTesting; import org.apache.orc.PhysicalWriter; +import org.apache.orc.TypeDescription; /** * An ORC file writer. 
The file is divided into stripes, which is the natural @@ -93,7 +96,14 @@ OrcFile.WriterOptions opts) throws IOException { super(fs, path, opts); this.inspector = opts.getInspector(); - this.internalBatch = opts.getSchema().createRowBatch(opts.getBatchSize()); + boolean useDecimal64ColumnVectors = opts.getConfiguration() != null && + HiveConf.getBoolVar(opts.getConfiguration(), HiveConf.ConfVars.HIVE_EXEC_ORC_USE_DECIMAL64_COLUMN_VECTORS); + if (useDecimal64ColumnVectors) { + this.internalBatch = opts.getSchema().createRowBatch(TypeDescription.RowBatchVersion.USE_DECIMAL64, + opts.getBatchSize()); + } else { + this.internalBatch = opts.getSchema().createRowBatch(opts.getBatchSize()); + } this.fields = initializeFieldsFromOi(inspector); } @@ -207,9 +217,15 @@ static void setColumn(int rowId, ColumnVector column, break; } case DECIMAL: { - DecimalColumnVector vector = (DecimalColumnVector) column; - vector.set(rowId, ((HiveDecimalObjectInspector) inspector) + if (column instanceof Decimal64ColumnVector) { + Decimal64ColumnVector vector = (Decimal64ColumnVector) column; + vector.set(rowId, ((HiveDecimalObjectInspector) inspector) + .getPrimitiveWritableObject(obj)); + } else { + DecimalColumnVector vector = (DecimalColumnVector) column; + vector.set(rowId, ((HiveDecimalObjectInspector) inspector) .getPrimitiveWritableObject(obj)); + } break; } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedTreeReaderFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedTreeReaderFactory.java index 646b214..3dc9079 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedTreeReaderFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedTreeReaderFactory.java @@ -17,20 +17,19 @@ */ package org.apache.hadoop.hive.ql.io.orc.encoded; +import org.apache.orc.OrcFile; import org.apache.orc.impl.RunLengthByteReader; import java.io.IOException; import java.util.Arrays; import java.util.List; -import org.apache.curator.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hive.common.io.encoded.EncodedColumnBatch; import org.apache.hadoop.hive.common.io.encoded.EncodedColumnBatch.ColumnStreamData; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; import org.apache.hadoop.hive.ql.io.orc.encoded.Reader.OrcEncodedColumnBatch; import org.apache.orc.CompressionCodec; import org.apache.orc.TypeDescription; -import org.apache.orc.TypeDescription.Category; import org.apache.orc.impl.InStream; import org.apache.orc.impl.PositionProvider; import org.apache.orc.impl.SettableUncompressedStream; @@ -1053,7 +1052,8 @@ private DecimalStreamReader(int columnId, int precision, int scale, boolean isFileCompressed, OrcProto.ColumnEncoding encoding, TreeReaderFactory.Context context, List vectors) throws IOException { - super(columnId, presentStream, valueStream, scaleStream, encoding, context); + super(columnId, presentStream, valueStream, scaleStream, encoding, + precision, scale, context); this._isFileCompressed = isFileCompressed; this._presentStream = presentStream; this._valueStream = valueStream; @@ -1201,6 +1201,147 @@ public static StreamReaderBuilder builder() { } } + protected static class Decimal64StreamReader extends Decimal64TreeReader implements SettableTreeReader { + private boolean _isFileCompressed; + private SettableUncompressedStream _presentStream; + private SettableUncompressedStream _valueStream; + private List vectors; + private int vectorIndex = 0; + + private Decimal64StreamReader(int columnId, int 
precision, int scale, + SettableUncompressedStream presentStream, + SettableUncompressedStream valueStream, + boolean isFileCompressed, + OrcProto.ColumnEncoding encoding, TreeReaderFactory.Context context, + List vectors) throws IOException { + super(columnId, presentStream, valueStream, encoding, + precision, scale, context); + this._isFileCompressed = isFileCompressed; + this._presentStream = presentStream; + this._valueStream = valueStream; + this.vectors = vectors; + } + + @Override + public void seek(PositionProvider index) throws IOException { + if (vectors != null) return; + if (present != null) { + if (_isFileCompressed) { + index.getNext(); + } + present.seek(index); + } + + // data stream could be empty stream or already reached end of stream before present stream. + // This can happen if all values in stream are nulls or last row group values are all null. + skipCompressedIndex(_isFileCompressed, index); + if (_valueStream.available() > 0) { + valueReader.seek(index); + } else { + skipSeek(index); + } + } + + @Override + public void nextVector( + ColumnVector previousVector, boolean[] isNull, int batchSize) throws IOException { + if (vectors == null) { + super.nextVector(previousVector, isNull, batchSize); + return; + } + vectors.get(vectorIndex++).shallowCopyTo(previousVector); + if (vectorIndex == vectors.size()) { + vectors = null; + } + } + + @Override + public void setBuffers(EncodedColumnBatch batch, boolean sameStripe) { + assert vectors == null; // See the comment in TimestampStreamReader.setBuffers. + ColumnStreamData[] streamsData = batch.getColumnData(columnId); + if (_presentStream != null) { + _presentStream.setBuffers(StreamUtils.createDiskRangeInfo(streamsData[OrcProto.Stream.Kind.PRESENT_VALUE])); + } + if (_valueStream != null) { + _valueStream.setBuffers(StreamUtils.createDiskRangeInfo(streamsData[OrcProto.Stream.Kind.DATA_VALUE])); + } + } + + public static class StreamReaderBuilder { + private int columnIndex; + private ColumnStreamData presentStream; + private ColumnStreamData valueStream; + private int scale; + private int precision; + private CompressionCodec compressionCodec; + private OrcProto.ColumnEncoding columnEncoding; + private List vectors; + private TreeReaderFactory.Context context; + + public StreamReaderBuilder setColumnIndex(int columnIndex) { + this.columnIndex = columnIndex; + return this; + } + + public StreamReaderBuilder setPrecision(int precision) { + this.precision = precision; + return this; + } + + public StreamReaderBuilder setScale(int scale) { + this.scale = scale; + return this; + } + + public StreamReaderBuilder setContext(TreeReaderFactory.Context context) { + this.context = context; + return this; + } + + public StreamReaderBuilder setPresentStream(ColumnStreamData presentStream) { + this.presentStream = presentStream; + return this; + } + + public StreamReaderBuilder setValueStream(ColumnStreamData valueStream) { + this.valueStream = valueStream; + return this; + } + + + public StreamReaderBuilder setCompressionCodec(CompressionCodec compressionCodec) { + this.compressionCodec = compressionCodec; + return this; + } + + public StreamReaderBuilder setColumnEncoding(OrcProto.ColumnEncoding encoding) { + this.columnEncoding = encoding; + return this; + } + + public Decimal64StreamReader build() throws IOException { + SettableUncompressedStream presentInStream = StreamUtils.createSettableUncompressedStream( + OrcProto.Stream.Kind.PRESENT.name(), presentStream); + + SettableUncompressedStream valueInStream = 
StreamUtils.createSettableUncompressedStream( + OrcProto.Stream.Kind.DATA.name(), valueStream); + + boolean isFileCompressed = compressionCodec != null; + return new Decimal64StreamReader(columnIndex, precision, scale, presentInStream, + valueInStream, isFileCompressed, columnEncoding, context, vectors); + } + + public StreamReaderBuilder setVectors(List vectors) { + this.vectors = vectors; + return this; + } + } + + public static StreamReaderBuilder builder() { + return new StreamReaderBuilder(); + } + } + protected static class DateStreamReader extends DateTreeReader implements SettableTreeReader { private boolean isFileCompressed; private SettableUncompressedStream _presentStream; @@ -2102,8 +2243,8 @@ public static StreamReaderBuilder builder() { } public static StructTreeReader createRootTreeReader(TypeDescription[] batchSchemas, - List encodings, OrcEncodedColumnBatch batch, - CompressionCodec codec, TreeReaderFactory.Context context) throws IOException { + List encodings, OrcEncodedColumnBatch batch, + CompressionCodec codec, Context context, final boolean useDecimal64ColumnVectors) throws IOException { // Note: we only look at the schema here to deal with complex types. Somebody has set up the // reader with whatever ideas they had to the schema and we just trust the reader to // produce the CVBs that was asked for. However, we only need to look at top level columns. @@ -2118,7 +2259,8 @@ public static StructTreeReader createRootTreeReader(TypeDescription[] batchSchem if (!batch.hasData(batchColIx) && !batch.hasVectors(batchColIx)) { throw new AssertionError("No data for column " + batchColIx + ": " + batchSchemas[i]); } - childReaders[i] = createEncodedTreeReader(batchSchemas[i], encodings, batch, codec, context); + childReaders[i] = createEncodedTreeReader(batchSchemas[i], encodings, batch, codec, context, + useDecimal64ColumnVectors); } // TODO: do we actually need this reader? the caller just extracts child readers. @@ -2139,8 +2281,8 @@ private static void skipSeek(PositionProvider index) { private static TreeReader createEncodedTreeReader(TypeDescription schema, - List encodings, OrcEncodedColumnBatch batch, - CompressionCodec codec, TreeReaderFactory.Context context) throws IOException { + List encodings, OrcEncodedColumnBatch batch, + CompressionCodec codec, Context context, final boolean useDecimal64ColumnVectors) throws IOException { int columnIndex = schema.getId(); ColumnStreamData[] streamBuffers = null; List vectors = null; @@ -2201,12 +2343,12 @@ private static TreeReader createEncodedTreeReader(TypeDescription schema, case TIMESTAMP: case DATE: return getPrimitiveTreeReader(columnIndex, schema, codec, columnEncoding, - present, data, dictionary, lengths, secondary, context, vectors); + present, data, dictionary, lengths, secondary, context, vectors, useDecimal64ColumnVectors); case LIST: assert vectors == null; // Not currently supported. 
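+ // The useDecimal64ColumnVectors flag rides along on each recursive
+ // createEncodedTreeReader call, so DECIMAL children nested inside LIST, MAP,
+ // STRUCT and UNION columns get the same column-vector kind as top-level columns.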
protected static class DateStreamReader extends DateTreeReader implements SettableTreeReader { private boolean isFileCompressed; private SettableUncompressedStream _presentStream; @@ -2102,8 +2243,8 @@ public static StreamReaderBuilder builder() { } public static StructTreeReader createRootTreeReader(TypeDescription[] batchSchemas, - List<OrcProto.ColumnEncoding> encodings, OrcEncodedColumnBatch batch, - CompressionCodec codec, TreeReaderFactory.Context context) throws IOException { + List<OrcProto.ColumnEncoding> encodings, OrcEncodedColumnBatch batch, + CompressionCodec codec, Context context, final boolean useDecimal64ColumnVectors) throws IOException { // Note: we only look at the schema here to deal with complex types. Somebody has set up the // reader with whatever ideas they had to the schema and we just trust the reader to // produce the CVBs that was asked for. However, we only need to look at top level columns. @@ -2118,7 +2259,8 @@ public static StructTreeReader createRootTreeReader(TypeDescription[] batchSchem if (!batch.hasData(batchColIx) && !batch.hasVectors(batchColIx)) { throw new AssertionError("No data for column " + batchColIx + ": " + batchSchemas[i]); } - childReaders[i] = createEncodedTreeReader(batchSchemas[i], encodings, batch, codec, context); + childReaders[i] = createEncodedTreeReader(batchSchemas[i], encodings, batch, codec, context, + useDecimal64ColumnVectors); } // TODO: do we actually need this reader? the caller just extracts child readers. @@ -2139,8 +2281,8 @@ private static void skipSeek(PositionProvider index) { private static TreeReader createEncodedTreeReader(TypeDescription schema, - List<OrcProto.ColumnEncoding> encodings, OrcEncodedColumnBatch batch, - CompressionCodec codec, TreeReaderFactory.Context context) throws IOException { + List<OrcProto.ColumnEncoding> encodings, OrcEncodedColumnBatch batch, + CompressionCodec codec, Context context, final boolean useDecimal64ColumnVectors) throws IOException { int columnIndex = schema.getId(); ColumnStreamData[] streamBuffers = null; List<ColumnVector> vectors = null; @@ -2201,12 +2343,12 @@ private static TreeReader createEncodedTreeReader(TypeDescription schema, case TIMESTAMP: case DATE: return getPrimitiveTreeReader(columnIndex, schema, codec, columnEncoding, - present, data, dictionary, lengths, secondary, context, vectors); + present, data, dictionary, lengths, secondary, context, vectors, useDecimal64ColumnVectors); case LIST: assert vectors == null; // Not currently supported. TypeDescription elementType = schema.getChildren().get(0); TreeReader elementReader = createEncodedTreeReader( - elementType, encodings, batch, codec, context); + elementType, encodings, batch, codec, context, useDecimal64ColumnVectors); return ListStreamReader.builder() .setColumnIndex(columnIndex) .setColumnEncoding(columnEncoding) @@ -2221,9 +2363,9 @@ private static TreeReader createEncodedTreeReader(TypeDescription schema, TypeDescription keyType = schema.getChildren().get(0); TypeDescription valueType = schema.getChildren().get(1); TreeReader keyReader = createEncodedTreeReader( - keyType, encodings, batch, codec, context); + keyType, encodings, batch, codec, context, useDecimal64ColumnVectors); TreeReader valueReader = createEncodedTreeReader( - valueType, encodings, batch, codec, context); + valueType, encodings, batch, codec, context, useDecimal64ColumnVectors); return MapStreamReader.builder() .setColumnIndex(columnIndex) .setColumnEncoding(columnEncoding) @@ -2241,7 +2383,7 @@ private static TreeReader createEncodedTreeReader(TypeDescription schema, for (int i = 0; i < childCount; i++) { TypeDescription childType = schema.getChildren().get(i); childReaders[i] = createEncodedTreeReader( - childType, encodings, batch, codec, context); + childType, encodings, batch, codec, context, useDecimal64ColumnVectors); } return StructStreamReader.builder() .setColumnIndex(columnIndex) @@ -2259,7 +2401,7 @@ private static TreeReader createEncodedTreeReader(TypeDescription schema, for (int i = 0; i < childCount; i++) { TypeDescription childType = schema.getChildren().get(i); childReaders[i] = createEncodedTreeReader( - childType, encodings, batch, codec, context); + childType, encodings, batch, codec, context, useDecimal64ColumnVectors); } return UnionStreamReader.builder() .setColumnIndex(columnIndex) @@ -2277,10 +2419,10 @@ private static TreeReader createEncodedTreeReader(TypeDescription schema, } private static TreeReader getPrimitiveTreeReader(final int columnIndex, - TypeDescription columnType, CompressionCodec codec, OrcProto.ColumnEncoding columnEncoding, - ColumnStreamData present, ColumnStreamData data, ColumnStreamData dictionary, - ColumnStreamData lengths, ColumnStreamData secondary, TreeReaderFactory.Context context, - List<ColumnVector> vectors) throws IOException { + TypeDescription columnType, CompressionCodec codec, OrcProto.ColumnEncoding columnEncoding, + ColumnStreamData present, ColumnStreamData data, ColumnStreamData dictionary, + ColumnStreamData lengths, ColumnStreamData secondary, Context context, + List<ColumnVector> vectors, final boolean useDecimal64ColumnVectors) throws IOException { switch (columnType.getCategory()) { case BINARY: return BinaryStreamReader.builder() @@ -2391,7 +2533,21 @@ private static TreeReader getPrimitiveTreeReader(final int columnIndex, .setVectors(vectors) .build(); case DECIMAL: - return DecimalStreamReader.builder() + if (useDecimal64ColumnVectors && context.getFileFormat() == OrcFile.Version.UNSTABLE_PRE_2_0 && + columnType.getPrecision() <= TypeDescription.MAX_DECIMAL64_PRECISION) { + return Decimal64StreamReader.builder() + .setColumnIndex(columnIndex) + .setPrecision(columnType.getPrecision()) + .setScale(columnType.getScale()) + .setPresentStream(present) + .setValueStream(data) + .setCompressionCodec(codec) + .setColumnEncoding(columnEncoding) + .setVectors(vectors) + .setContext(context) + .build(); + } else { + return DecimalStreamReader.builder() .setColumnIndex(columnIndex) .setPrecision(columnType.getPrecision()) .setScale(columnType.getScale()) @@ -2403,6 +2559,7 @@ private static TreeReader getPrimitiveTreeReader(final int columnIndex, .setVectors(vectors) .setContext(context) .build(); + } case TIMESTAMP: return TimestampStreamReader.builder() .setColumnIndex(columnIndex)
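
The DECIMAL branch above takes the decimal64 path only for ORC v2 (UNSTABLE-PRE-2.0) files whose declared precision is at most TypeDescription.MAX_DECIMAL64_PRECISION (18). The cap exists because a Decimal64ColumnVector keeps each value as an unscaled Java long, and 18 decimal digits is the largest precision guaranteed to fit in a signed 64-bit value; a small self-contained illustration:

    // A decimal(10,2) value such as 1234.56 is held as the unscaled long 123456;
    // the scale is tracked separately. Skipping per-value HiveDecimalWritable
    // objects is what makes the decimal64 representation fast.
    long unscaled = 123456L;
    int scale = 2;
    java.math.BigDecimal value = java.math.BigDecimal.valueOf(unscaled, scale);
    System.out.println(value);          // prints 1234.56
    // 18 nines (999999999999999999) is below Long.MAX_VALUE (9223372036854775807),
    // while 19 decimal digits can overflow a long.
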
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java index ed6d577..5e70a05 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java @@ -25,6 +25,7 @@ import org.apache.hadoop.hive.common.io.FileMetadataCache; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.vector.VectorizedInputFormatInterface; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport; import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils; import org.apache.hadoop.hive.ql.io.InputFormatChecker; import org.apache.hadoop.hive.ql.io.LlapCacheOnlyInputFormatInterface; @@ -115,4 +116,9 @@ public boolean validateInput(FileSystem fs, HiveConf conf, List<FileStatus> files return true; } + + @Override + public VectorizedSupport.Support[] getSupportedFeatures() { + return null; + } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java index 394f826..a9cbdc9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java @@ -20,6 +20,7 @@ import static org.apache.hadoop.hive.ql.plan.ReduceSinkDesc.ReducerTraits.UNIFORM; +import java.io.IOException; import java.io.Serializable; import java.lang.annotation.Annotation; import java.util.ArrayList; @@ -40,6 +41,7 @@ import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang3.tuple.ImmutablePair; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedInputFormatInterface; import org.apache.hadoop.hive.ql.exec.vector.reducesink.*; import org.apache.hadoop.hive.ql.io.AcidUtils; import org.apache.hadoop.hive.ql.parse.spark.SparkPartitionPruningSinkOperator; @@ -1207,6 +1209,14 @@ private void determineDataColumnNums(TableScanOperator tableScanOperator, private Support[] getVectorizedInputFormatSupports( Class<? extends InputFormat> inputFileFormatClass) { + try { + InputFormat inputFormat = FetchOperator.getInputFormatFromCache(inputFileFormatClass, hiveConf); + if (inputFormat instanceof VectorizedInputFormatInterface) { + return ((VectorizedInputFormatInterface) inputFormat).getSupportedFeatures(); + } + } catch (IOException e) { + LOG.error("Unable to instantiate {} input format class. Cannot determine vectorization support.", + inputFileFormatClass.getName(), e); + } // FUTURE: Decide how to ask an input file format what vectorization features it supports. return null; }
@@ -1830,11 +1840,12 @@ private void validateAndVectorizeMapWork(MapWork mapWork, VectorTaskColumnInfo v supportRemovedReasons.add(removeString); } - // And, if LLAP is enabled for now, disable DECIMAL_64; - if (isLlapIoEnabled && supportSet.contains(Support.DECIMAL_64)) { + // Disable DECIMAL_64 if it was turned off via config. + if (!hiveConf.getBoolVar(ConfVars.HIVE_EXEC_ORC_USE_DECIMAL64_COLUMN_VECTORS) && + supportSet.contains(Support.DECIMAL_64)) { supportSet.remove(Support.DECIMAL_64); String removeString = - "DECIMAL_64 disabled because LLAP is enabled"; + "DECIMAL_64 disabled as hive.exec.orc.use.decimal64.column.vectors is set to false"; supportRemovedReasons.add(removeString); } @@ -4386,6 +4397,9 @@ private boolean usesVectorUDFAdaptor(VectorExpression[] vecExprs) { if (index < size) { vectorSelectExprs = Arrays.copyOf(vectorSelectExprs, index); } + // If the parent output type has physical variation NONE and the two decimal children disagree, + // one DECIMAL_64 and the other NONE, insert a cast for the DECIMAL_64 child. + VectorExpression[] vse = vContext.getVectorExpressionsUpConvertDecimal64(colList); vectorSelectDesc.setSelectExpressions(vectorSelectExprs); vectorSelectDesc.setProjectedOutputColumns(projectedOutputColumns); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnNoBuckets.java b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnNoBuckets.java index af43b14..c15c5a6 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnNoBuckets.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnNoBuckets.java @@ -194,8 +194,8 @@ public void testCTAS() throws Exception { "'='true', 'transactional_properties'='default') as select a, b from " + Table.ACIDTBL); rs = runStatementOnDriver("select ROW__ID, a, b, INPUT__FILE__NAME from myctas2 order by ROW__ID"); String expected2[][] = { - {"{\"writeid\":1,\"bucketid\":536870912,\"rowid\":0}\t3\t4", "warehouse/myctas2/delta_0000001_0000001_0000/bucket_00000"}, - {"{\"writeid\":1,\"bucketid\":536936448,\"rowid\":0}\t1\t2", "warehouse/myctas2/delta_0000001_0000001_0000/bucket_00001"} + {"{\"writeid\":1,\"bucketid\":536870912,\"rowid\":0}\t1\t2", "warehouse/myctas2/delta_0000001_0000001_0000/bucket_00000"}, + {"{\"writeid\":1,\"bucketid\":536936448,\"rowid\":0}\t3\t4", "warehouse/myctas2/delta_0000001_0000001_0000/bucket_00001"} }; checkExpected(rs, expected2, "Unexpected row count after ctas from acid table"); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java index fb2335a..c5e1b33 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java @@ -229,7 +229,7 @@ static String getColumnNamesProperty() { return "booleanValue,byteValue,shortValue,intValue,longValue,floatValue,doubleValue,stringValue,decimalValue,dateValue,timestampValue"; } static String getColumnTypesProperty() { - return "boolean:tinyint:smallint:int:bigint:float:double:string:decimal:date:timestamp"; + return "boolean:tinyint:smallint:int:bigint:float:double:string:decimal(38,18):date:timestamp"; } } @@ -2547,14 +2547,14 @@ public void testCombinationInputFormatWithAcid() throws Exception { assertEquals("mock:/combinationAcid/p=0/base_0000010/bucket_00000", split.getPath().toString()); assertEquals(0, split.getStart()); - assertEquals(677, split.getLength()); + assertEquals(700, split.getLength()); split = (HiveInputFormat.HiveInputSplit) splits[1];
assertEquals("org.apache.hadoop.hive.ql.io.orc.OrcInputFormat", split.inputFormatClassName()); assertEquals("mock:/combinationAcid/p=0/base_0000010/bucket_00001", split.getPath().toString()); assertEquals(0, split.getStart()); - assertEquals(703, split.getLength()); + assertEquals(724, split.getLength()); CombineHiveInputFormat.CombineHiveInputSplit combineSplit = (CombineHiveInputFormat.CombineHiveInputSplit) splits[2]; assertEquals(BUCKETS, combineSplit.getNumPaths()); @@ -2562,7 +2562,7 @@ public void testCombinationInputFormatWithAcid() throws Exception { assertEquals("mock:/combinationAcid/p=1/00000" + bucket + "_0", combineSplit.getPath(bucket).toString()); assertEquals(0, combineSplit.getOffset(bucket)); - assertEquals(241, combineSplit.getLength(bucket)); + assertEquals(251, combineSplit.getLength(bucket)); } String[] hosts = combineSplit.getLocations(); assertEquals(2, hosts.length); @@ -3847,9 +3847,10 @@ public void testRowNumberUniquenessInDifferentSplits() throws Exception { * Test schema evolution when using the reader directly. */ @Test - public void testSchemaEvolution() throws Exception { + public void testSchemaEvolutionOldDecimal() throws Exception { TypeDescription fileSchema = TypeDescription.fromString("struct,d:string>"); + conf.setBoolean(ConfVars.HIVE_EXEC_ORC_USE_DECIMAL64_COLUMN_VECTORS.varname, false); Writer writer = OrcFile.createWriter(testFilePath, OrcFile.writerOptions(conf) .fileSystem(fs) @@ -3915,6 +3916,78 @@ public void testSchemaEvolution() throws Exception { } /** + * Test schema evolution when using the reader directly. + */ + @Test + public void testSchemaEvolutionDecimal64() throws Exception { + TypeDescription fileSchema = + TypeDescription.fromString("struct,d:string>"); + conf.setBoolean(ConfVars.HIVE_EXEC_ORC_USE_DECIMAL64_COLUMN_VECTORS.varname, true); + Writer writer = OrcFile.createWriter(testFilePath, + OrcFile.writerOptions(conf) + .fileSystem(fs) + .setSchema(fileSchema) + .compress(org.apache.orc.CompressionKind.NONE)); + VectorizedRowBatch batch = fileSchema.createRowBatch(TypeDescription.RowBatchVersion.USE_DECIMAL64,1000); + batch.size = 1000; + LongColumnVector lcv = ((LongColumnVector) ((StructColumnVector) batch.cols[1]).fields[0]); + for(int r=0; r < 1000; r++) { + ((LongColumnVector) batch.cols[0]).vector[r] = r * 42; + lcv.vector[r] = r * 10001; + ((BytesColumnVector) batch.cols[2]).setVal(r, + Integer.toHexString(r).getBytes(StandardCharsets.UTF_8)); + } + writer.addRowBatch(batch); + writer.close(); + TypeDescription readerSchema = TypeDescription.fromString( + "struct,d:string,future2:int>"); + Reader reader = OrcFile.createReader(testFilePath, + OrcFile.readerOptions(conf).filesystem(fs)); + RecordReader rows = reader.rowsOptions(new Reader.Options() + .schema(readerSchema)); + batch = readerSchema.createRowBatchV2(); + lcv = ((LongColumnVector) ((StructColumnVector) batch.cols[1]).fields[0]); + LongColumnVector future1 = ((LongColumnVector) ((StructColumnVector) batch.cols[1]).fields[1]); + assertEquals(true, rows.nextBatch(batch)); + assertEquals(1000, batch.size); + assertEquals(true, future1.isRepeating); + assertEquals(true, future1.isNull[0]); + assertEquals(true, batch.cols[3].isRepeating); + assertEquals(true, batch.cols[3].isNull[0]); + for(int r=0; r < batch.size; ++r) { + assertEquals("row " + r, r * 42, ((LongColumnVector) batch.cols[0]).vector[r]); + assertEquals("row " + r, r * 10001, lcv.vector[r]); + assertEquals("row " + r, r * 10001, lcv.vector[r]); + assertEquals("row " + r, Integer.toHexString(r), + 
((BytesColumnVector) batch.cols[2]).toString(r)); } assertEquals(false, rows.nextBatch(batch)); rows.close(); + + // try it again with an include vector + rows = reader.rowsOptions(new Reader.Options() + .schema(readerSchema) + .include(new boolean[]{false, true, true, true, false, false, true})); + batch = readerSchema.createRowBatchV2(); + lcv = ((LongColumnVector) ((StructColumnVector) batch.cols[1]).fields[0]); + future1 = ((LongColumnVector) ((StructColumnVector) batch.cols[1]).fields[1]); + assertEquals(true, rows.nextBatch(batch)); + assertEquals(1000, batch.size); + assertEquals(true, future1.isRepeating); + assertEquals(true, future1.isNull[0]); + assertEquals(true, batch.cols[3].isRepeating); + assertEquals(true, batch.cols[3].isNull[0]); + assertEquals(true, batch.cols[2].isRepeating); + assertEquals(true, batch.cols[2].isNull[0]); + for(int r=0; r < batch.size; ++r) { + assertEquals("row " + r, r * 42, ((LongColumnVector) batch.cols[0]).vector[r]); + assertEquals("row " + r, r * 10001, lcv.vector[r]); + } + assertEquals(false, rows.nextBatch(batch)); + rows.close(); + }
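
These tests swap createRowBatch(1000) for the decimal64-aware batch factories. Under the ORC TypeDescription API used here, the difference is only which vector class backs decimal columns; a brief sketch (the schema is illustrative):

    TypeDescription schema = TypeDescription.fromString("struct<d:decimal(10,2)>");
    // Classic batch: decimals come back as DecimalColumnVector (HiveDecimalWritable based).
    VectorizedRowBatch v1 = schema.createRowBatch(1000);
    // Decimal64 batch: columns with precision <= 18 become Decimal64ColumnVector,
    // a LongColumnVector subclass holding unscaled longs.
    VectorizedRowBatch v2 = schema.createRowBatch(TypeDescription.RowBatchVersion.USE_DECIMAL64, 1000);
    VectorizedRowBatch v3 = schema.createRowBatchV2();  // same vectors, default batch size
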
+ + /** * Test column projection when using ACID. */ @Test @@ -3933,7 +4006,7 @@ public void testColumnProjectionWithAcid() throws Exception { .fileSystem(fs) .setSchema(fileSchema) .compress(org.apache.orc.CompressionKind.NONE)); - VectorizedRowBatch batch = fileSchema.createRowBatch(1000); + VectorizedRowBatch batch = fileSchema.createRowBatch(TypeDescription.RowBatchVersion.USE_DECIMAL64, 1000); batch.size = 1000; StructColumnVector scv = (StructColumnVector)batch.cols[5]; // operation @@ -4047,7 +4120,7 @@ public void testAcidReadPastLastStripeOffset() throws Exception { .stripeSize(128); // Create ORC file with small stripe size so we can write multiple stripes. Writer writer = OrcFile.createWriter(testFilePath, options); - VectorizedRowBatch batch = fileSchema.createRowBatch(1000); + VectorizedRowBatch batch = fileSchema.createRowBatch(TypeDescription.RowBatchVersion.USE_DECIMAL64, 1000); batch.size = 1000; StructColumnVector scv = (StructColumnVector)batch.cols[5]; // operation diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java index ef678a8..dc0da9c 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java @@ -698,13 +698,13 @@ public void testStringAndBinaryStatistics() throws Exception { assertEquals(3, stats[1].getNumberOfValues()); assertEquals(15, ((BinaryColumnStatistics) stats[1]).getSum()); - assertEquals("count: 3 hasNull: true sum: 15", stats[1].toString()); + assertEquals("count: 3 hasNull: true bytesOnDisk: 28 sum: 15", stats[1].toString()); assertEquals(3, stats[2].getNumberOfValues()); assertEquals("bar", ((StringColumnStatistics) stats[2]).getMinimum()); assertEquals("hi", ((StringColumnStatistics) stats[2]).getMaximum()); assertEquals(8, ((StringColumnStatistics) stats[2]).getSum()); - assertEquals("count: 3 hasNull: true min: bar max: hi sum: 8", + assertEquals("count: 3 hasNull: true bytesOnDisk: 22 min: bar max: hi sum: 8", stats[2].toString()); // check the inspectors @@ -917,13 +917,13 @@ public void test1() throws Exception { assertEquals(2, stats[1].getNumberOfValues()); assertEquals(1, ((BooleanColumnStatistics) stats[1]).getFalseCount()); assertEquals(1, ((BooleanColumnStatistics) stats[1]).getTrueCount()); - assertEquals("count: 2 hasNull: false true: 1", stats[1].toString()); + assertEquals("count: 2 hasNull: false bytesOnDisk: 5 true: 1", stats[1].toString()); assertEquals(2048, ((IntegerColumnStatistics) stats[3]).getMaximum()); assertEquals(1024, ((IntegerColumnStatistics) stats[3]).getMinimum()); assertEquals(true, ((IntegerColumnStatistics) stats[3]).isSumDefined()); assertEquals(3072, ((IntegerColumnStatistics) stats[3]).getSum()); - assertEquals("count: 2 hasNull: false min: 1024 max: 2048 sum: 3072", + assertEquals("count: 2 hasNull: false bytesOnDisk: 9 min: 1024 max: 2048 sum: 3072", stats[3].toString()); StripeStatistics ss = reader.getStripeStatistics().get(0); @@ -935,10 +935,10 @@ public void test1() throws Exception { assertEquals(-15.0, ((DoubleColumnStatistics) stats[7]).getMinimum()); assertEquals(-5.0, ((DoubleColumnStatistics) stats[7]).getMaximum()); assertEquals(-20.0, ((DoubleColumnStatistics) stats[7]).getSum(), 0.00001); - assertEquals("count: 2 hasNull: false min: -15.0 max: -5.0 sum: -20.0", + assertEquals("count: 2 hasNull: false bytesOnDisk: 15 min: -15.0 max: -5.0 sum: -20.0", stats[7].toString()); - assertEquals("count: 2 hasNull: false min: bye max: hi sum: 5", stats[9].toString()); + assertEquals("count: 2 hasNull: false bytesOnDisk: 14 min: bye max: hi sum: 5", stats[9].toString()); // check the inspectors StructObjectInspector readerInspector = @@ -1711,6 +1711,7 @@ public void testSeek() throws Exception { row = (OrcStruct) rows.next(row); BigRow expected = createRandomRow(intValues, doubleValues, stringValues, byteValues, words, i); + //assertEquals(expected, row); assertEquals(expected.boolean1.booleanValue(), ((BooleanWritable) row.getFieldValue(0)).get()); assertEquals(expected.byte1.byteValue(), diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java
b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java index d8a7af8..cc29384 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java @@ -168,7 +168,7 @@ private Reader createMockReader() throws IOException { setRow(row4, OrcRecordUpdater.INSERT_OPERATION, 40, 50, 60, 130, "fourth"); OrcStruct row5 = new OrcStruct(OrcRecordUpdater.FIELDS); setRow(row5, OrcRecordUpdater.INSERT_OPERATION, 40, 50, 61, 140, "fifth"); - Mockito.when(reader.rowsOptions(Mockito.any(Reader.Options.class))) + Mockito.when(reader.rowsOptions(Mockito.any(Reader.Options.class), Mockito.any(HiveConf.class))) .thenReturn(recordReader); Mockito.when(recordReader.hasNext()). @@ -192,7 +192,7 @@ public void testReaderPair() throws Exception { RecordIdentifier minKey = new RecordIdentifier(10, 20, 30); RecordIdentifier maxKey = new RecordIdentifier(40, 50, 60); ReaderPair pair = new OrcRawRecordMerger.ReaderPairAcid(key, reader, minKey, maxKey, - new Reader.Options()); + new Reader.Options(), new HiveConf()); RecordReader recordReader = pair.getRecordReader(); assertEquals(10, key.getWriteId()); assertEquals(20, key.getBucketProperty()); @@ -218,7 +218,7 @@ public void testReaderPairNoMin() throws Exception { Reader reader = createMockReader(); ReaderPair pair = new OrcRawRecordMerger.ReaderPairAcid(key, reader, null, null, - new Reader.Options()); + new Reader.Options(), new HiveConf()); RecordReader recordReader = pair.getRecordReader(); assertEquals(10, key.getWriteId()); assertEquals(20, key.getBucketProperty()); @@ -274,7 +274,7 @@ private Reader createMockOriginalReader() throws IOException { OrcStruct row4 = createOriginalRow("fourth"); OrcStruct row5 = createOriginalRow("fifth"); - Mockito.when(reader.rowsOptions(Mockito.any(Reader.Options.class))) + Mockito.when(reader.rowsOptions(Mockito.any(Reader.Options.class), Mockito.any(HiveConf.class))) .thenReturn(recordReader); Mockito.when(recordReader.hasNext()). 
thenReturn(true, true, true, true, true, false); @@ -410,7 +410,7 @@ public void testNewBase() throws Exception { types.add(typeBuilder.build()); Mockito.when(reader.getTypes()).thenReturn(types); - Mockito.when(reader.rowsOptions(Mockito.any(Reader.Options.class))) + Mockito.when(reader.rowsOptions(Mockito.any(Reader.Options.class), Mockito.any(HiveConf.class))) .thenReturn(recordReader); OrcStruct row1 = new OrcStruct(OrcRecordUpdater.FIELDS); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java index 1533ffa..81d2e2d 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java @@ -231,13 +231,13 @@ public void testStringAndBinaryStatistics() throws Exception { assertEquals(3, stats[1].getNumberOfValues()); assertEquals(15, ((BinaryColumnStatistics) stats[1]).getSum()); - assertEquals("count: 3 hasNull: true sum: 15", stats[1].toString()); + assertEquals("count: 3 hasNull: true bytesOnDisk: 28 sum: 15", stats[1].toString()); assertEquals(3, stats[2].getNumberOfValues()); assertEquals("bar", ((StringColumnStatistics) stats[2]).getMinimum()); assertEquals("hi", ((StringColumnStatistics) stats[2]).getMaximum()); assertEquals(8, ((StringColumnStatistics) stats[2]).getSum()); - assertEquals("count: 3 hasNull: true min: bar max: hi sum: 8", + assertEquals("count: 3 hasNull: true bytesOnDisk: 22 min: bar max: hi sum: 8", stats[2].toString()); // check the inspectors @@ -455,13 +455,13 @@ public void testOrcSerDeStatsComplex() throws Exception { assertEquals(2, stats[1].getNumberOfValues()); assertEquals(1, ((BooleanColumnStatistics) stats[1]).getFalseCount()); assertEquals(1, ((BooleanColumnStatistics) stats[1]).getTrueCount()); - assertEquals("count: 2 hasNull: false true: 1", stats[1].toString()); + assertEquals("count: 2 hasNull: false bytesOnDisk: 5 true: 1", stats[1].toString()); assertEquals(2048, ((IntegerColumnStatistics) stats[3]).getMaximum()); assertEquals(1024, ((IntegerColumnStatistics) stats[3]).getMinimum()); assertEquals(true, ((IntegerColumnStatistics) stats[3]).isSumDefined()); assertEquals(3072, ((IntegerColumnStatistics) stats[3]).getSum()); - assertEquals("count: 2 hasNull: false min: 1024 max: 2048 sum: 3072", + assertEquals("count: 2 hasNull: false bytesOnDisk: 9 min: 1024 max: 2048 sum: 3072", stats[3].toString()); assertEquals(Long.MAX_VALUE, @@ -469,16 +469,16 @@ public void testOrcSerDeStatsComplex() throws Exception { assertEquals(Long.MAX_VALUE, ((IntegerColumnStatistics) stats[5]).getMinimum()); assertEquals(false, ((IntegerColumnStatistics) stats[5]).isSumDefined()); - assertEquals("count: 2 hasNull: false min: 9223372036854775807 max: 9223372036854775807", + assertEquals("count: 2 hasNull: false bytesOnDisk: 12 min: 9223372036854775807 max: 9223372036854775807", stats[5].toString()); assertEquals(-15.0, ((DoubleColumnStatistics) stats[7]).getMinimum()); assertEquals(-5.0, ((DoubleColumnStatistics) stats[7]).getMaximum()); assertEquals(-20.0, ((DoubleColumnStatistics) stats[7]).getSum(), 0.00001); - assertEquals("count: 2 hasNull: false min: -15.0 max: -5.0 sum: -20.0", + assertEquals("count: 2 hasNull: false bytesOnDisk: 15 min: -15.0 max: -5.0 sum: -20.0", stats[7].toString()); - assertEquals("count: 2 hasNull: false min: bye max: hi sum: 5", stats[9].toString()); + assertEquals("count: 2 hasNull: false bytesOnDisk: 14 min: bye max: hi sum: 5", stats[9].toString()); } @Test 
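
All of the golden-output churn in these statistics tests has a single cause: the newer ORC writer records a per-column bytesOnDisk counter in the file footer, so ColumnStatistics.toString() now prints it. A minimal way to observe the new field, assuming testFilePath and conf as in the surrounding tests:

    Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf));
    for (org.apache.orc.ColumnStatistics cs : reader.getStatistics()) {
      // e.g. "count: 2 hasNull: false bytesOnDisk: 5 true: 1"
      System.out.println(cs);
    }
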
@@ -548,13 +548,13 @@ public void testOrcSerDeStatsComplexOldFormat() throws Exception { assertEquals(2, stats[1].getNumberOfValues()); assertEquals(1, ((BooleanColumnStatistics) stats[1]).getFalseCount()); assertEquals(1, ((BooleanColumnStatistics) stats[1]).getTrueCount()); - assertEquals("count: 2 hasNull: false true: 1", stats[1].toString()); + assertEquals("count: 2 hasNull: false bytesOnDisk: 5 true: 1", stats[1].toString()); assertEquals(2048, ((IntegerColumnStatistics) stats[3]).getMaximum()); assertEquals(1024, ((IntegerColumnStatistics) stats[3]).getMinimum()); assertEquals(true, ((IntegerColumnStatistics) stats[3]).isSumDefined()); assertEquals(3072, ((IntegerColumnStatistics) stats[3]).getSum()); - assertEquals("count: 2 hasNull: false min: 1024 max: 2048 sum: 3072", + assertEquals("count: 2 hasNull: false bytesOnDisk: 8 min: 1024 max: 2048 sum: 3072", stats[3].toString()); assertEquals(Long.MAX_VALUE, @@ -562,22 +562,22 @@ public void testOrcSerDeStatsComplexOldFormat() throws Exception { assertEquals(Long.MAX_VALUE, ((IntegerColumnStatistics) stats[5]).getMinimum()); assertEquals(false, ((IntegerColumnStatistics) stats[5]).isSumDefined()); - assertEquals("count: 2 hasNull: false min: 9223372036854775807 max: 9223372036854775807", + assertEquals("count: 2 hasNull: false bytesOnDisk: 12 min: 9223372036854775807 max: 9223372036854775807", stats[5].toString()); assertEquals(-15.0, ((DoubleColumnStatistics) stats[7]).getMinimum()); assertEquals(-5.0, ((DoubleColumnStatistics) stats[7]).getMaximum()); assertEquals(-20.0, ((DoubleColumnStatistics) stats[7]).getSum(), 0.00001); - assertEquals("count: 2 hasNull: false min: -15.0 max: -5.0 sum: -20.0", + assertEquals("count: 2 hasNull: false bytesOnDisk: 15 min: -15.0 max: -5.0 sum: -20.0", stats[7].toString()); assertEquals(5, ((BinaryColumnStatistics) stats[8]).getSum()); - assertEquals("count: 2 hasNull: false sum: 5", stats[8].toString()); + assertEquals("count: 2 hasNull: false bytesOnDisk: 14 sum: 5", stats[8].toString()); assertEquals("bye", ((StringColumnStatistics) stats[9]).getMinimum()); assertEquals("hi", ((StringColumnStatistics) stats[9]).getMaximum()); assertEquals(5, ((StringColumnStatistics) stats[9]).getSum()); - assertEquals("count: 2 hasNull: false min: bye max: hi sum: 5", stats[9].toString()); + assertEquals("count: 2 hasNull: false bytesOnDisk: 20 min: bye max: hi sum: 5", stats[9].toString()); } @Test(expected = ClassCastException.class) diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java index 0c9c95d..c23f00e 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java @@ -49,6 +49,7 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; +import org.apache.orc.TypeDescription; import org.junit.Before; import org.junit.Test; @@ -151,7 +152,7 @@ private void checkVectorizedReader() throws Exception { OrcFile.readerOptions(conf)); RecordReaderImpl vrr = (RecordReaderImpl) vreader.rows(); RecordReaderImpl rr = (RecordReaderImpl) reader.rows(); - VectorizedRowBatch batch = reader.getSchema().createRowBatch(); + VectorizedRowBatch batch = reader.getSchema().createRowBatchV2(); OrcStruct row = null; // Check Vectorized ORC reader against ORC row reader diff --git 
a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedOrcAcidRowBatchReader.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedOrcAcidRowBatchReader.java index e478371..551e5ca 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedOrcAcidRowBatchReader.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedOrcAcidRowBatchReader.java @@ -226,7 +226,7 @@ private void testVectorizedOrcAcidRowBatchReader(String deleteEventRegistry) thr assertTrue(vectorizedReader.getDeleteEventRegistry() instanceof SortMergedDeleteEventRegistry); } TypeDescription schema = OrcInputFormat.getDesiredRowTypeDescr(conf, true, Integer.MAX_VALUE); - VectorizedRowBatch vectorizedRowBatch = schema.createRowBatch(); + VectorizedRowBatch vectorizedRowBatch = schema.createRowBatchV2(); vectorizedRowBatch.setPartitionInfo(1, 0); // set data column count as 1. long previousPayload = Long.MIN_VALUE; while (vectorizedReader.next(null, vectorizedRowBatch)) { diff --git a/ql/src/test/queries/clientpositive/llap_acid2.q b/ql/src/test/queries/clientpositive/llap_acid2.q index a409c26..cd06d31 100644 --- a/ql/src/test/queries/clientpositive/llap_acid2.q +++ b/ql/src/test/queries/clientpositive/llap_acid2.q @@ -29,18 +29,27 @@ CREATE TABLE orc_llap_n2 ( cfloat1 FLOAT, cdouble1 DOUBLE, cstring1 string, - cfloat2 float -) stored as orc TBLPROPERTIES ('transactional'='true'); + cfloat2 float, + cdecimal1 decimal(10,3), + cdecimal2 decimal(38,10) +) stored as orc TBLPROPERTIES ('transactional'='true','orc.write.format'='UNSTABLE-PRE-2.0'); insert into table orc_llap_n2 select cint, cbigint, cfloat, cdouble, cint as c1, cbigint as c2, cfloat as c3, cdouble as c4, cint as c8, cbigint as c7, cfloat as c6, cdouble as c5, - cstring1, cfloat as c9 from alltypesorc order by cdouble asc limit 30; - + cstring1, cfloat as c9, cast("1.123" as decimal(10,3))as c10, + cast("1.123456789" as decimal(38,18)) as c11 from alltypesorc order by cdouble asc limit 30; +alter table orc_llap_n2 set TBLPROPERTIES ('transactional'='true','orc.write.format'='0.12'); +insert into table orc_llap_n2 +select cint, cbigint, cfloat, cdouble, + cint as c1, cbigint as c2, cfloat as c3, cdouble as c4, + cint as c8, cbigint as c7, cfloat as c6, cdouble as c5, + cstring1, cfloat as c9, cast("3.321" as decimal(10,3))as c10, + cast("9.987654321" as decimal(38,18)) as c11 from alltypesorc order by cdouble asc limit 30; CREATE TABLE orc_llap2 ( @@ -57,18 +66,22 @@ CREATE TABLE orc_llap2 ( cfloat1 FLOAT, cdouble1 DOUBLE, cstring1 string, - cfloat2 float -) stored as orc TBLPROPERTIES ('transactional'='false'); + cfloat2 float, + cdecimal1 decimal(10,3), + cdecimal2 decimal(38,10) +) stored as orc TBLPROPERTIES ('transactional'='false', 'orc.write.format'='UNSTABLE-PRE-2.0'); insert into table orc_llap2 select cint, cbigint, cfloat, cdouble, cint as c1, cbigint as c2, cfloat as c3, cdouble as c4, cint as c8, cbigint as c7, cfloat as c6, cdouble as c5, - cstring1, cfloat as c9 from alltypesorc order by cdouble asc limit 30; + cstring1, cfloat as c9, cast("1.123" as decimal(10,3))as c10, + cast("1.123456789" as decimal(38,18)) as c11 from alltypesorc order by cdouble asc limit 30; -alter table orc_llap2 set TBLPROPERTIES ('transactional'='true'); +alter table orc_llap2 set TBLPROPERTIES ('transactional'='true','orc.write.format'='0.12'); -update orc_llap2 set cstring1 = 'testvalue' where cstring1 = 'N016jPED08o'; +update orc_llap2 set cstring1 = 'testvalue', cdecimal1 = cast("3.321" as decimal(10,3)), +cdecimal2 = 
cast("9.987654321" as decimal(38,18)) where cstring1 = 'N016jPED08o'; SET hive.llap.io.enabled=true; diff --git a/ql/src/test/queries/clientpositive/llap_decimal64_reader.q b/ql/src/test/queries/clientpositive/llap_decimal64_reader.q new file mode 100644 index 0000000..c602ed6 --- /dev/null +++ b/ql/src/test/queries/clientpositive/llap_decimal64_reader.q @@ -0,0 +1,54 @@ +--! qt:dataset:alltypesorc +SET hive.vectorized.execution.enabled=true; + +SET hive.llap.io.enabled=false; + +SET hive.exec.orc.default.row.index.stride=1000; +SET hive.optimize.index.filter=true; +set hive.auto.convert.join=false; + +DROP TABLE orc_llap_n0; + +-- this test mix and matches orc versions and flips config to use decimal64 column vectors +set hive.auto.convert.join=true; +SET hive.llap.io.enabled=true; +CREATE TABLE orc_llap_n0( + ctinyint TINYINT, + csmallint SMALLINT, + cint INT, + cbigint BIGINT, + cfloat FLOAT, + cdouble DOUBLE, + cstring1 STRING, + cstring2 STRING, + ctimestamp1 TIMESTAMP, + ctimestamp2 TIMESTAMP, + cboolean1 BOOLEAN, + cboolean2 BOOLEAN, + cdecimal1 decimal(10,2), + cdecimal2 decimal(38,5)) + STORED AS ORC tblproperties ("orc.compress"="NONE"); + +insert into table orc_llap_n0 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2, + cast("3.345" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc; + +alter table orc_llap_n0 set tblproperties ("orc.compress"="NONE", 'orc.write.format'='UNSTABLE-PRE-2.0'); + +insert into table orc_llap_n0 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2, + cast("4.456" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc; + +set hive.exec.orc.use.decimal64.column.vectors=true; +explain vectorization select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2; +select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2; + +set hive.exec.orc.use.decimal64.column.vectors=false; +explain vectorization select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2; +select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2; + +DROP TABLE orc_llap_n0; diff --git a/ql/src/test/queries/clientpositive/llap_uncompressed.q b/ql/src/test/queries/clientpositive/llap_uncompressed.q index 875356c..de3cdc6 100644 --- a/ql/src/test/queries/clientpositive/llap_uncompressed.q +++ b/ql/src/test/queries/clientpositive/llap_uncompressed.q @@ -24,13 +24,20 @@ CREATE TABLE orc_llap_n0( ctimestamp1 TIMESTAMP, ctimestamp2 TIMESTAMP, cboolean1 BOOLEAN, - cboolean2 BOOLEAN) + cboolean2 BOOLEAN, + cdecimal1 decimal(10,2), + cdecimal2 decimal(38,5)) STORED AS ORC tblproperties ("orc.compress"="NONE"); insert into table orc_llap_n0 -select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2 -from alltypesorc; +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2, + cast("3.345" as decimal(10,2)), 
cast("5.56789" as decimal(38,5)) from alltypesorc; +alter table orc_llap_n0 set tblproperties ("orc.compress"="NONE", 'orc.write.format'='UNSTABLE-PRE-2.0'); + +insert into table orc_llap_n0 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2, + cast("3.345" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc; SET hive.llap.io.enabled=true; diff --git a/ql/src/test/results/clientpositive/acid_mapjoin.q.out b/ql/src/test/results/clientpositive/acid_mapjoin.q.out index 76a781e..5569a03 100644 --- a/ql/src/test/results/clientpositive/acid_mapjoin.q.out +++ b/ql/src/test/results/clientpositive/acid_mapjoin.q.out @@ -73,21 +73,21 @@ STAGE PLANS: Stage: Stage-5 Map Reduce Local Work Alias -> Map Local Tables: - $hdt$_0:acid1 + $hdt$_1:acid2 Fetch Operator limit: -1 Alias -> Map Local Operator Tree: - $hdt$_0:acid1 + $hdt$_1:acid2 TableScan - alias: acid1 - Statistics: Num rows: 316 Data size: 1265 Basic stats: COMPLETE Column stats: NONE + alias: acid2 + Statistics: Num rows: 210 Data size: 840 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 316 Data size: 1265 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 210 Data size: 840 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 316 Data size: 1265 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 210 Data size: 840 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col0 (type: int) @@ -97,15 +97,15 @@ STAGE PLANS: Map Reduce Map Operator Tree: TableScan - alias: acid2 - Statistics: Num rows: 210 Data size: 840 Basic stats: COMPLETE Column stats: NONE + alias: acid1 + Statistics: Num rows: 316 Data size: 1265 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 210 Data size: 840 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 316 Data size: 1265 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 210 Data size: 840 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 316 Data size: 1265 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 diff --git a/ql/src/test/results/clientpositive/acid_nullscan.q.out b/ql/src/test/results/clientpositive/acid_nullscan.q.out index 6dad497..ccd33ae 100644 --- a/ql/src/test/results/clientpositive/acid_nullscan.q.out +++ b/ql/src/test/results/clientpositive/acid_nullscan.q.out @@ -42,12 +42,12 @@ STAGE PLANS: Map Operator Tree: TableScan alias: acid_vectorized_n1 - Statistics: Num rows: 88 Data size: 25400 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 90 Data size: 26090 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: false (type: boolean) - Statistics: Num rows: 1 Data size: 288 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 289 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(a) mode: hash @@ -82,7 +82,7 @@ STAGE PLANS: serialization.ddl struct acid_vectorized_n1 { i32 a, string b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe - totalSize 2540 + totalSize 2609 transactional true transactional_properties default 
#### A masked pattern was here #### @@ -104,7 +104,7 @@ STAGE PLANS: serialization.ddl struct acid_vectorized_n1 { i32 a, string b} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 2540 + totalSize 2609 transactional true transactional_properties default #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/acid_table_stats.q.out b/ql/src/test/results/clientpositive/acid_table_stats.q.out index 2596922..b266794 100644 --- a/ql/src/test/results/clientpositive/acid_table_stats.q.out +++ b/ql/src/test/results/clientpositive/acid_table_stats.q.out @@ -94,7 +94,7 @@ Table: acid #### A masked pattern was here #### Partition Parameters: numFiles 2 - totalSize 4009 + totalSize 4064 #### A masked pattern was here #### # Storage Information @@ -132,9 +132,9 @@ STAGE PLANS: Map Operator Tree: TableScan alias: acid - Statistics: Num rows: 82 Data size: 40090 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 83 Data size: 40640 Basic stats: COMPLETE Column stats: NONE Select Operator - Statistics: Num rows: 82 Data size: 40090 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 83 Data size: 40640 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count() mode: hash @@ -210,7 +210,7 @@ Partition Parameters: numFiles 2 numRows 1000 rawDataSize 208000 - totalSize 4009 + totalSize 4064 #### A masked pattern was here #### # Storage Information @@ -261,7 +261,7 @@ Partition Parameters: numFiles 2 numRows 1000 rawDataSize 208000 - totalSize 4009 + totalSize 4064 #### A masked pattern was here #### # Storage Information @@ -385,7 +385,7 @@ Table: acid #### A masked pattern was here #### Partition Parameters: numFiles 4 - totalSize 8011 + totalSize 8119 #### A masked pattern was here #### # Storage Information @@ -432,7 +432,7 @@ Partition Parameters: numFiles 4 numRows 2000 rawDataSize 416000 - totalSize 8011 + totalSize 8119 #### A masked pattern was here #### # Storage Information @@ -667,7 +667,7 @@ Partition Parameters: numFiles 2 numRows 1000 rawDataSize 176000 - totalSize 2979 + totalSize 3008 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/annotate_stats_part.q.out b/ql/src/test/results/clientpositive/annotate_stats_part.q.out index 9e45101..18e8161 100644 --- a/ql/src/test/results/clientpositive/annotate_stats_part.q.out +++ b/ql/src/test/results/clientpositive/annotate_stats_part.q.out @@ -90,11 +90,11 @@ STAGE PLANS: Processor Tree: TableScan alias: loc_orc_n4 - Statistics: Num rows: 18 Data size: 14640 Basic stats: COMPLETE Column stats: PARTIAL + Statistics: Num rows: 20 Data size: 15680 Basic stats: COMPLETE Column stats: PARTIAL Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 18 Data size: 6840 Basic stats: COMPLETE Column stats: PARTIAL + Statistics: Num rows: 20 Data size: 7600 Basic stats: COMPLETE Column stats: PARTIAL ListSink PREHOOK: query: analyze table loc_orc_n4 partition(year='2001') compute statistics @@ -121,11 +121,11 @@ STAGE PLANS: Processor Tree: TableScan alias: loc_orc_n4 - Statistics: Num rows: 8 Data size: 5048 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 5364 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), '__HIVE_DEFAULT_PARTITION__' (type: string) 
outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 8 Data size: 5048 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 5364 Basic stats: COMPLETE Column stats: NONE ListSink PREHOOK: query: explain select * from loc_orc_n4 diff --git a/ql/src/test/results/clientpositive/annotate_stats_table.q.out b/ql/src/test/results/clientpositive/annotate_stats_table.q.out index b502957..9604208 100644 --- a/ql/src/test/results/clientpositive/annotate_stats_table.q.out +++ b/ql/src/test/results/clientpositive/annotate_stats_table.q.out @@ -81,11 +81,11 @@ STAGE PLANS: Processor Tree: TableScan alias: emp_orc - Statistics: Num rows: 13 Data size: 2444 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2632 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: lastname (type: string), deptid (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 13 Data size: 2444 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2632 Basic stats: COMPLETE Column stats: NONE ListSink PREHOOK: query: analyze table emp_orc compute statistics diff --git a/ql/src/test/results/clientpositive/autoColumnStats_4.q.out b/ql/src/test/results/clientpositive/autoColumnStats_4.q.out index 5be906e..d651887 100644 --- a/ql/src/test/results/clientpositive/autoColumnStats_4.q.out +++ b/ql/src/test/results/clientpositive/autoColumnStats_4.q.out @@ -200,7 +200,7 @@ Table Type: MANAGED_TABLE Table Parameters: bucketing_version 2 numFiles 2 - totalSize 1856 + totalSize 1907 transactional true transactional_properties default #### A masked pattern was here #### @@ -243,7 +243,7 @@ Table Parameters: COLUMN_STATS_ACCURATE {} bucketing_version 2 numFiles 4 - totalSize 3000 + totalSize 3091 transactional true transactional_properties default #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/beeline/materialized_view_create_rewrite.q.out b/ql/src/test/results/clientpositive/beeline/materialized_view_create_rewrite.q.out index a717b80..a8548c2 100644 --- a/ql/src/test/results/clientpositive/beeline/materialized_view_create_rewrite.q.out +++ b/ql/src/test/results/clientpositive/beeline/materialized_view_create_rewrite.q.out @@ -69,7 +69,7 @@ bucketing_version 2 numFiles 1 numRows 2 rawDataSize 408 -totalSize 453 +totalSize 457 #### A masked pattern was here #### PREHOOK: query: create materialized view if not exists cmv_mat_view2_n4 enable rewrite as select a, c from cmv_basetable_n10 where a = 3 @@ -102,7 +102,7 @@ bucketing_version 2 numFiles 1 numRows 2 rawDataSize 232 -totalSize 322 +totalSize 326 #### A masked pattern was here #### PREHOOK: query: explain select a, c from cmv_basetable_n10 where a = 3 diff --git a/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_2.q.out b/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_2.q.out index fa58add..aafa386 100644 --- a/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_2.q.out +++ b/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_2.q.out @@ -42,7 +42,7 @@ Table Parameters: numFiles 2 numRows 3 rawDataSize 24 - totalSize 547 + totalSize 567 #### A masked pattern was here #### # Storage Information @@ -87,7 +87,7 @@ Table Parameters: numFiles 2 numRows 3 rawDataSize 24 - totalSize 547 + totalSize 567 #### A masked pattern was here #### # Storage Information @@ -177,7 +177,7 @@ Table Parameters: numFiles 2 numRows 3 rawDataSize 24 - totalSize 547 + totalSize 
567 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/deleteAnalyze.q.out b/ql/src/test/results/clientpositive/deleteAnalyze.q.out index d98114b..0661885 100644 --- a/ql/src/test/results/clientpositive/deleteAnalyze.q.out +++ b/ql/src/test/results/clientpositive/deleteAnalyze.q.out @@ -54,7 +54,7 @@ Table Parameters: numFiles 1 numRows 2 rawDataSize 634 - totalSize 578 + totalSize 579 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/druid/druidmini_mv.q.out b/ql/src/test/results/clientpositive/druid/druidmini_mv.q.out index 35f4c1b..1f36b34 100644 --- a/ql/src/test/results/clientpositive/druid/druidmini_mv.q.out +++ b/ql/src/test/results/clientpositive/druid/druidmini_mv.q.out @@ -337,7 +337,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: cmv_basetable_n2 - Statistics: Num rows: 30 Data size: 360 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 372 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (a = 3) (type: boolean) Statistics: Num rows: 5 Data size: 60 Basic stats: COMPLETE Column stats: NONE @@ -353,7 +353,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: cmv_basetable_n2 - Statistics: Num rows: 30 Data size: 480 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 496 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((a = 3) and (d = 3)) (type: boolean) Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE @@ -454,7 +454,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: cmv_basetable_n2 - Statistics: Num rows: 30 Data size: 21960 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 22692 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (a = 3) (type: boolean) Statistics: Num rows: 5 Data size: 3660 Basic stats: COMPLETE Column stats: NONE @@ -545,7 +545,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: cmv_basetable_n2 - Statistics: Num rows: 30 Data size: 480 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 496 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((a = 3) and (d = 3)) (type: boolean) Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE diff --git a/ql/src/test/results/clientpositive/extrapolate_part_stats_date.q.out b/ql/src/test/results/clientpositive/extrapolate_part_stats_date.q.out index f1cd05c..48a0951 100644 --- a/ql/src/test/results/clientpositive/extrapolate_part_stats_date.q.out +++ b/ql/src/test/results/clientpositive/extrapolate_part_stats_date.q.out @@ -148,7 +148,7 @@ STAGE PLANS: serialization.ddl struct date_dim_n1 { date d_date} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 193 + totalSize 199 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -194,7 +194,7 @@ STAGE PLANS: serialization.ddl struct date_dim_n1 { date d_date} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 193 + totalSize 199 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -240,7 +240,7 @@ STAGE PLANS: serialization.ddl struct date_dim_n1 { date d_date} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 193 + totalSize 199 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ 
-286,7 +286,7 @@ STAGE PLANS: serialization.ddl struct date_dim_n1 { date d_date} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 193 + totalSize 199 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde diff --git a/ql/src/test/results/clientpositive/extrapolate_part_stats_full.q.out b/ql/src/test/results/clientpositive/extrapolate_part_stats_full.q.out index bec6dd4..f80599d 100644 --- a/ql/src/test/results/clientpositive/extrapolate_part_stats_full.q.out +++ b/ql/src/test/results/clientpositive/extrapolate_part_stats_full.q.out @@ -135,7 +135,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 369 + totalSize 383 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -181,7 +181,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 389 + totalSize 404 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -250,7 +250,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 369 + totalSize 383 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -296,7 +296,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 389 + totalSize 404 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -455,7 +455,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 281 + totalSize 291 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -502,7 +502,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 283 + totalSize 293 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -549,7 +549,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 265 + totalSize 275 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -596,7 +596,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 265 + totalSize 275 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -666,7 +666,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 281 + totalSize 291 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -713,7 +713,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 283 + totalSize 293 #### A masked pattern was 
here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -760,7 +760,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 265 + totalSize 275 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -807,7 +807,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 265 + totalSize 275 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde diff --git a/ql/src/test/results/clientpositive/extrapolate_part_stats_partial.q.out b/ql/src/test/results/clientpositive/extrapolate_part_stats_partial.q.out index 1a13b21..e1024be 100644 --- a/ql/src/test/results/clientpositive/extrapolate_part_stats_partial.q.out +++ b/ql/src/test/results/clientpositive/extrapolate_part_stats_partial.q.out @@ -161,7 +161,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 369 + totalSize 383 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -207,7 +207,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 390 + totalSize 405 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -253,7 +253,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 410 + totalSize 426 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -299,7 +299,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 419 + totalSize 433 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -368,7 +368,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 369 + totalSize 383 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -414,7 +414,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 390 + totalSize 405 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -460,7 +460,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 410 + totalSize 426 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -506,7 +506,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 419 + totalSize 433 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -603,7 +603,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d_n1 { string state, i32 
locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 369 + totalSize 383 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -649,7 +649,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 390 + totalSize 405 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -695,7 +695,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 410 + totalSize 426 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -741,7 +741,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 419 + totalSize 433 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -810,7 +810,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 369 + totalSize 383 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -856,7 +856,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 390 + totalSize 405 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -902,7 +902,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 410 + totalSize 426 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -948,7 +948,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 419 + totalSize 433 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1100,7 +1100,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 286 + totalSize 296 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1147,7 +1147,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 295 + totalSize 305 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1194,7 +1194,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 302 + totalSize 312 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1241,7 +1241,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 281 + totalSize 291 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1288,7 +1288,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 276 + totalSize 286 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1335,7 +1335,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 288 + totalSize 298 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1382,7 +1382,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 297 + totalSize 307 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1429,7 +1429,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 265 + totalSize 275 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1476,7 +1476,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 265 + totalSize 275 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1523,7 +1523,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 298 + totalSize 308 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1570,7 +1570,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 295 + totalSize 305 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1640,7 +1640,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 286 + totalSize 296 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1687,7 +1687,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 295 + totalSize 305 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1734,7 +1734,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 302 + totalSize 312 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1781,7 +1781,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 281 + totalSize 291 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1828,7 +1828,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 276 + 
totalSize 286 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1875,7 +1875,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 288 + totalSize 298 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1922,7 +1922,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 297 + totalSize 307 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1969,7 +1969,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 265 + totalSize 275 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -2016,7 +2016,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 265 + totalSize 275 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -2063,7 +2063,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 298 + totalSize 308 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -2110,7 +2110,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 295 + totalSize 305 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde diff --git a/ql/src/test/results/clientpositive/llap/acid_bucket_pruning.q.out b/ql/src/test/results/clientpositive/llap/acid_bucket_pruning.q.out index c89c22c..e9fef82 100644 --- a/ql/src/test/results/clientpositive/llap/acid_bucket_pruning.q.out +++ b/ql/src/test/results/clientpositive/llap/acid_bucket_pruning.q.out @@ -45,7 +45,7 @@ STAGE PLANS: alias: acidtbldefault filterExpr: (a = 1) (type: boolean) buckets included: [13,] of 16 - Statistics: Num rows: 1837 Data size: 6988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1854 Data size: 7052 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -102,7 +102,7 @@ STAGE PLANS: serialization.ddl struct acidtbldefault { i32 a} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 34540 + totalSize 34863 transactional true transactional_properties default #### A masked pattern was here #### @@ -125,7 +125,7 @@ STAGE PLANS: serialization.ddl struct acidtbldefault { i32 a} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 34540 + totalSize 34863 transactional true transactional_properties default #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/llap/acid_vectorization_original.q.out b/ql/src/test/results/clientpositive/llap/acid_vectorization_original.q.out index 38a9770..dafd5d9 100644 --- a/ql/src/test/results/clientpositive/llap/acid_vectorization_original.q.out +++ b/ql/src/test/results/clientpositive/llap/acid_vectorization_original.q.out @@ -665,22 +665,22 @@ STAGE PLANS: Map Operator 
Tree: TableScan alias: over10k_orc_bucketed - Statistics: Num rows: 1229 Data size: 703430 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1241 Data size: 710230 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: ROW__ID (type: struct) outputColumnNames: ROW__ID - Statistics: Num rows: 1229 Data size: 703430 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1241 Data size: 710230 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: count() keys: ROW__ID (type: struct) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 614 Data size: 51576 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 620 Data size: 52080 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: struct) sort order: + Map-reduce partition columns: _col0 (type: struct) - Statistics: Num rows: 614 Data size: 51576 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 620 Data size: 52080 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: bigint) Execution mode: llap LLAP IO: may be used (ACID table) @@ -692,13 +692,13 @@ STAGE PLANS: keys: KEY._col0 (type: struct) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 614 Data size: 51576 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 620 Data size: 52080 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (_col1 > 1L) (type: boolean) - Statistics: Num rows: 204 Data size: 17136 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 206 Data size: 17304 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 204 Data size: 17136 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 206 Data size: 17304 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/llap/alter_merge_orc.q.out b/ql/src/test/results/clientpositive/llap/alter_merge_orc.q.out index 7b33e8e..cf90940 100644 --- a/ql/src/test/results/clientpositive/llap/alter_merge_orc.q.out +++ b/ql/src/test/results/clientpositive/llap/alter_merge_orc.q.out @@ -48,9 +48,9 @@ columns:struct columns { i32 key, string value} partitioned:false partitionColumns: totalNumberFiles:3 -totalFileSize:7545 -maxFileSize:2515 -minFileSize:2515 +totalFileSize:7590 +maxFileSize:2530 +minFileSize:2530 #### A masked pattern was here #### PREHOOK: query: select count(1) from src_orc_merge_test @@ -91,9 +91,9 @@ columns:struct columns { i32 key, string value} partitioned:false partitionColumns: totalNumberFiles:1 -totalFileSize:7198 -maxFileSize:7198 -minFileSize:7198 +totalFileSize:7214 +maxFileSize:7214 +minFileSize:7214 #### A masked pattern was here #### PREHOOK: query: select count(1) from src_orc_merge_test @@ -171,9 +171,9 @@ columns:struct columns { i32 key, string value} partitioned:true partitionColumns:struct partition_columns { string ds} totalNumberFiles:3 -totalFileSize:7545 -maxFileSize:2515 -minFileSize:2515 +totalFileSize:7590 +maxFileSize:2530 +minFileSize:2530 #### A masked pattern was here #### PREHOOK: query: select count(1) from src_orc_merge_test_part_n2 @@ -218,9 +218,9 @@ columns:struct columns { i32 key, string value} partitioned:true partitionColumns:struct partition_columns { string ds} 
totalNumberFiles:1 -totalFileSize:7198 -maxFileSize:7198 -minFileSize:7198 +totalFileSize:7214 +maxFileSize:7214 +minFileSize:7214 #### A masked pattern was here #### PREHOOK: query: select count(1) from src_orc_merge_test_part_n2 diff --git a/ql/src/test/results/clientpositive/llap/alter_merge_stats_orc.q.out b/ql/src/test/results/clientpositive/llap/alter_merge_stats_orc.q.out index c8b831d..5ed7d70 100644 --- a/ql/src/test/results/clientpositive/llap/alter_merge_stats_orc.q.out +++ b/ql/src/test/results/clientpositive/llap/alter_merge_stats_orc.q.out @@ -48,9 +48,9 @@ columns:struct columns { i32 key, string value} partitioned:false partitionColumns: totalNumberFiles:3 -totalFileSize:7545 -maxFileSize:2515 -minFileSize:2515 +totalFileSize:7590 +maxFileSize:2530 +minFileSize:2530 #### A masked pattern was here #### PREHOOK: query: desc extended src_orc_merge_test_stat @@ -93,7 +93,7 @@ Table Parameters: numFiles 3 numRows 1500 rawDataSize 141000 - totalSize 7545 + totalSize 7590 #### A masked pattern was here #### # Storage Information @@ -144,7 +144,7 @@ Table Parameters: numFiles 1 numRows 1500 rawDataSize 141000 - totalSize 7198 + totalSize 7214 #### A masked pattern was here #### # Storage Information @@ -214,9 +214,9 @@ columns:struct columns { i32 key, string value} partitioned:true partitionColumns:struct partition_columns { string ds} totalNumberFiles:3 -totalFileSize:7545 -maxFileSize:2515 -minFileSize:2515 +totalFileSize:7590 +maxFileSize:2530 +minFileSize:2530 #### A masked pattern was here #### PREHOOK: query: desc formatted src_orc_merge_test_part_stat partition (ds='2011') @@ -243,7 +243,7 @@ Partition Parameters: numFiles 3 numRows 1500 rawDataSize 141000 - totalSize 7545 + totalSize 7590 #### A masked pattern was here #### # Storage Information @@ -290,7 +290,7 @@ Partition Parameters: numFiles 3 numRows 1500 rawDataSize 141000 - totalSize 7545 + totalSize 7590 #### A masked pattern was here #### # Storage Information @@ -345,7 +345,7 @@ Partition Parameters: numFiles 1 numRows 1500 rawDataSize 141000 - totalSize 7198 + totalSize 7214 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/llap/column_table_stats_orc.q.out b/ql/src/test/results/clientpositive/llap/column_table_stats_orc.q.out index 11a9c0e..658b057 100644 --- a/ql/src/test/results/clientpositive/llap/column_table_stats_orc.q.out +++ b/ql/src/test/results/clientpositive/llap/column_table_stats_orc.q.out @@ -42,7 +42,7 @@ Table Parameters: numFiles 1 numRows 1 rawDataSize 170 - totalSize 273 + totalSize 283 #### A masked pattern was here #### # Storage Information @@ -120,7 +120,7 @@ STAGE PLANS: serialization.ddl struct s_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 273 + totalSize 283 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -142,7 +142,7 @@ STAGE PLANS: serialization.ddl struct s_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 273 + totalSize 283 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.s_n0 @@ -222,7 +222,7 @@ Table Parameters: numFiles 1 numRows 1 rawDataSize 170 - totalSize 273 + totalSize 283 #### A masked pattern was here #### # Storage Information @@ -299,7 +299,7 @@ Table Parameters: numPartitions 2 numRows 2 rawDataSize 340 - totalSize 546 + totalSize 566 #### A masked pattern was here 
#### # Storage Information @@ -380,7 +380,7 @@ STAGE PLANS: serialization.ddl struct spart_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 273 + totalSize 283 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -425,7 +425,7 @@ STAGE PLANS: serialization.ddl struct spart_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 273 + totalSize 283 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -544,7 +544,7 @@ Table Parameters: numPartitions 2 numRows 2 rawDataSize 340 - totalSize 546 + totalSize 566 #### A masked pattern was here #### # Storage Information @@ -582,7 +582,7 @@ Partition Parameters: numFiles 1 numRows 1 rawDataSize 170 - totalSize 273 + totalSize 283 #### A masked pattern was here #### # Storage Information @@ -620,7 +620,7 @@ Partition Parameters: numFiles 1 numRows 1 rawDataSize 170 - totalSize 273 + totalSize 283 #### A masked pattern was here #### # Storage Information @@ -701,7 +701,7 @@ Table Parameters: numPartitions 2 numRows 2 rawDataSize 340 - totalSize 546 + totalSize 566 #### A masked pattern was here #### # Storage Information @@ -782,7 +782,7 @@ STAGE PLANS: serialization.ddl struct spart_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 273 + totalSize 283 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -896,7 +896,7 @@ Table Parameters: numPartitions 2 numRows 2 rawDataSize 340 - totalSize 546 + totalSize 566 #### A masked pattern was here #### # Storage Information @@ -934,7 +934,7 @@ Partition Parameters: numFiles 1 numRows 1 rawDataSize 170 - totalSize 273 + totalSize 283 #### A masked pattern was here #### # Storage Information @@ -972,7 +972,7 @@ Partition Parameters: numFiles 1 numRows 1 rawDataSize 170 - totalSize 273 + totalSize 283 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/llap/default_constraint.q.out b/ql/src/test/results/clientpositive/llap/default_constraint.q.out index cb69af7..3365569 100644 --- a/ql/src/test/results/clientpositive/llap/default_constraint.q.out +++ b/ql/src/test/results/clientpositive/llap/default_constraint.q.out @@ -1498,7 +1498,7 @@ Table Type: MANAGED_TABLE Table Parameters: bucketing_version 2 numFiles 1 - totalSize 1070 + totalSize 1102 transactional true transactional_properties default #### A masked pattern was here #### @@ -1668,7 +1668,7 @@ Table Parameters: bucketing_version 2 #### A masked pattern was here #### numFiles 2 - totalSize 2140 + totalSize 2204 transactional true transactional_properties default #### A masked pattern was here #### @@ -1748,7 +1748,7 @@ Table Parameters: bucketing_version 2 #### A masked pattern was here #### numFiles 2 - totalSize 2140 + totalSize 2204 transactional true transactional_properties default #### A masked pattern was here #### @@ -1925,7 +1925,7 @@ Table Parameters: bucketing_version 2 #### A masked pattern was here #### numFiles 3 - totalSize 3199 + totalSize 3296 transactional true transactional_properties default #### A masked pattern was here #### @@ -2004,7 +2004,7 @@ Table Parameters: bucketing_version 2 #### A masked pattern was here #### numFiles 3 - totalSize 3199 + totalSize 3296 transactional true transactional_properties default #### A masked pattern was here #### @@ 
-2084,7 +2084,7 @@ Table Parameters: bucketing_version 2 #### A masked pattern was here #### numFiles 3 - totalSize 3199 + totalSize 3296 transactional true transactional_properties default #### A masked pattern was here #### @@ -2660,7 +2660,7 @@ Table Type: MANAGED_TABLE Table Parameters: bucketing_version 2 numFiles 1 - totalSize 1070 + totalSize 1102 transactional true transactional_properties default #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/llap/deleteAnalyze.q.out b/ql/src/test/results/clientpositive/llap/deleteAnalyze.q.out index a125fd6..bf82b32 100644 --- a/ql/src/test/results/clientpositive/llap/deleteAnalyze.q.out +++ b/ql/src/test/results/clientpositive/llap/deleteAnalyze.q.out @@ -54,7 +54,7 @@ Table Parameters: numFiles 1 numRows 2 rawDataSize 634 - totalSize 578 + totalSize 579 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/llap/dynamic_semijoin_reduction.q.out b/ql/src/test/results/clientpositive/llap/dynamic_semijoin_reduction.q.out index 22f2860..2c448df 100644 --- a/ql/src/test/results/clientpositive/llap/dynamic_semijoin_reduction.q.out +++ b/ql/src/test/results/clientpositive/llap/dynamic_semijoin_reduction.q.out @@ -1600,7 +1600,7 @@ STAGE PLANS: serialization.ddl struct srcpart_date_n7 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 3038 + totalSize 3052 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1644,7 +1644,7 @@ STAGE PLANS: serialization.ddl struct srcpart_date_n7 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 3038 + totalSize 3052 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1780,7 +1780,7 @@ STAGE PLANS: serialization.ddl struct srcpart_small_n3 { string key1, string value1} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 459 + totalSize 469 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde diff --git a/ql/src/test/results/clientpositive/llap/dynamic_semijoin_user_level.q.out b/ql/src/test/results/clientpositive/llap/dynamic_semijoin_user_level.q.out index 048712e..eefa592 100644 --- a/ql/src/test/results/clientpositive/llap/dynamic_semijoin_user_level.q.out +++ b/ql/src/test/results/clientpositive/llap/dynamic_semijoin_user_level.q.out @@ -866,7 +866,7 @@ STAGE PLANS: serialization.ddl struct srcpart_date_n9 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 3038 + totalSize 3052 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -912,7 +912,7 @@ STAGE PLANS: serialization.ddl struct srcpart_date_n9 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 3038 + totalSize 3052 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1053,7 +1053,7 @@ STAGE PLANS: serialization.ddl struct srcpart_small_n4 { string key1, string value1} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 459 + totalSize 469 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde diff --git a/ql/src/test/results/clientpositive/llap/dynpart_sort_opt_vectorization.q.out 
b/ql/src/test/results/clientpositive/llap/dynpart_sort_opt_vectorization.q.out index 709100f..29be9a4 100644 --- a/ql/src/test/results/clientpositive/llap/dynpart_sort_opt_vectorization.q.out +++ b/ql/src/test/results/clientpositive/llap/dynpart_sort_opt_vectorization.q.out @@ -980,7 +980,7 @@ Partition Parameters: numFiles 2 numRows 32 rawDataSize 640 - totalSize 1392 + totalSize 1428 #### A masked pattern was here #### # Storage Information @@ -1020,7 +1020,7 @@ Partition Parameters: numFiles 2 numRows 6 rawDataSize 120 - totalSize 1096 + totalSize 1132 #### A masked pattern was here #### # Storage Information @@ -1060,7 +1060,7 @@ Partition Parameters: numFiles 2 numRows 14 rawDataSize 280 - totalSize 1210 + totalSize 1246 #### A masked pattern was here #### # Storage Information @@ -1100,7 +1100,7 @@ Partition Parameters: numFiles 2 numRows 6 rawDataSize 120 - totalSize 1096 + totalSize 1132 #### A masked pattern was here #### # Storage Information @@ -1139,7 +1139,7 @@ Partition Parameters: numFiles 2 numRows 32 rawDataSize 640 - totalSize 1424 + totalSize 1460 #### A masked pattern was here #### # Storage Information @@ -1178,7 +1178,7 @@ Partition Parameters: numFiles 2 numRows 4 rawDataSize 80 - totalSize 936 + totalSize 968 #### A masked pattern was here #### # Storage Information @@ -1217,7 +1217,7 @@ Partition Parameters: numFiles 2 numRows 32 rawDataSize 640 - totalSize 1416 + totalSize 1444 #### A masked pattern was here #### # Storage Information @@ -1256,7 +1256,7 @@ Partition Parameters: numFiles 2 numRows 4 rawDataSize 80 - totalSize 944 + totalSize 978 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/llap/dynpart_sort_optimization2.q.out b/ql/src/test/results/clientpositive/llap/dynpart_sort_optimization2.q.out index 72f3b8b..0b57fbc 100644 --- a/ql/src/test/results/clientpositive/llap/dynpart_sort_optimization2.q.out +++ b/ql/src/test/results/clientpositive/llap/dynpart_sort_optimization2.q.out @@ -1287,7 +1287,7 @@ Partition Parameters: numFiles 1 numRows 11 rawDataSize 88 - totalSize 454 + totalSize 464 #### A masked pattern was here #### # Storage Information @@ -1345,7 +1345,7 @@ Partition Parameters: numFiles 1 numRows 13 rawDataSize 104 - totalSize 477 + totalSize 487 #### A masked pattern was here #### # Storage Information @@ -1540,7 +1540,7 @@ Partition Parameters: numFiles 1 numRows 11 rawDataSize 88 - totalSize 454 + totalSize 464 #### A masked pattern was here #### # Storage Information @@ -1598,7 +1598,7 @@ Partition Parameters: numFiles 1 numRows 13 rawDataSize 104 - totalSize 477 + totalSize 487 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/llap/dynpart_sort_optimization_acid.q.out b/ql/src/test/results/clientpositive/llap/dynpart_sort_optimization_acid.q.out index fa51dd3..8dff107 100644 --- a/ql/src/test/results/clientpositive/llap/dynpart_sort_optimization_acid.q.out +++ b/ql/src/test/results/clientpositive/llap/dynpart_sort_optimization_acid.q.out @@ -94,19 +94,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: acid_part - Statistics: Num rows: 158 Data size: 60174 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 160 Data size: 61011 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 'foo') (type: boolean) - Statistics: Num rows: 5 Data size: 1904 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1906 Basic stats: COMPLETE Column stats: NONE Select Operator 
expressions: ROW__ID (type: struct) outputColumnNames: _col0 - Statistics: Num rows: 5 Data size: 1904 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1906 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: struct) sort order: + Map-reduce partition columns: UDFToInteger(_col0) (type: int) - Statistics: Num rows: 5 Data size: 1904 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1906 Basic stats: COMPLETE Column stats: NONE Execution mode: llap LLAP IO: may be used (ACID table) Reducer 2 @@ -115,10 +115,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: struct), 'foo' (type: string), 'bar' (type: string), '2008-04-08' (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 5 Data size: 1904 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 5 Data size: 1904 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1906 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat @@ -189,7 +189,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: acid_part - Statistics: Num rows: 156 Data size: 102424 Basic stats: COMPLETE Column stats: PARTIAL + Statistics: Num rows: 159 Data size: 104357 Basic stats: COMPLETE Column stats: PARTIAL Filter Operator predicate: (key = 'foo') (type: boolean) Statistics: Num rows: 5 Data size: 1355 Basic stats: COMPLETE Column stats: PARTIAL @@ -380,19 +380,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: acid_part_sdpo - Statistics: Num rows: 173 Data size: 66062 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 176 Data size: 67083 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 'foo') (type: boolean) - Statistics: Num rows: 5 Data size: 1909 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1905 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ROW__ID (type: struct) outputColumnNames: _col0 - Statistics: Num rows: 5 Data size: 1909 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1905 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: struct) sort order: + Map-reduce partition columns: UDFToInteger(_col0) (type: int) - Statistics: Num rows: 5 Data size: 1909 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1905 Basic stats: COMPLETE Column stats: NONE Execution mode: llap LLAP IO: may be used (ACID table) Reducer 2 @@ -401,10 +401,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: struct), 'foo' (type: string), 'bar' (type: string), '2008-04-08' (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 5 Data size: 1909 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1905 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 5 Data size: 1909 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1905 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat @@ -475,7 +475,7 @@ STAGE 
PLANS: Map Operator Tree: TableScan alias: acid_part_sdpo - Statistics: Num rows: 168 Data size: 110259 Basic stats: COMPLETE Column stats: PARTIAL + Statistics: Num rows: 171 Data size: 112202 Basic stats: COMPLETE Column stats: PARTIAL Filter Operator predicate: (key = 'foo') (type: boolean) Statistics: Num rows: 5 Data size: 1355 Basic stats: COMPLETE Column stats: PARTIAL @@ -675,19 +675,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: acid_2l_part - Statistics: Num rows: 155 Data size: 59623 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 157 Data size: 60537 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 'foo') (type: boolean) - Statistics: Num rows: 5 Data size: 1923 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1927 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ROW__ID (type: struct) outputColumnNames: _col0 - Statistics: Num rows: 5 Data size: 1923 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1927 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: struct) sort order: + Map-reduce partition columns: UDFToInteger(_col0) (type: int) - Statistics: Num rows: 5 Data size: 1923 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1927 Basic stats: COMPLETE Column stats: NONE Execution mode: llap LLAP IO: may be used (ACID table) Reducer 2 @@ -696,10 +696,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: struct), 'foo' (type: string), 'bar' (type: string), '2008-04-08' (type: string), 11 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4 - Statistics: Num rows: 5 Data size: 1923 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1927 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 5 Data size: 1923 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1927 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat @@ -1095,19 +1095,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: acid_2l_part_sdpo - Statistics: Num rows: 155 Data size: 59623 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 157 Data size: 60537 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 'foo') (type: boolean) - Statistics: Num rows: 5 Data size: 1923 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1927 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ROW__ID (type: struct) outputColumnNames: _col0 - Statistics: Num rows: 5 Data size: 1923 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1927 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: struct) sort order: + Map-reduce partition columns: UDFToInteger(_col0) (type: int) - Statistics: Num rows: 5 Data size: 1923 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1927 Basic stats: COMPLETE Column stats: NONE Execution mode: llap LLAP IO: may be used (ACID table) Reducer 2 @@ -1116,10 +1116,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: struct), 'foo' (type: string), 'bar' (type: string), '2008-04-08' (type: string), 11 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4 - 
Statistics: Num rows: 5 Data size: 1923 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1927 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 5 Data size: 1923 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1927 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat @@ -1515,7 +1515,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: acid_2l_part_sdpo_no_cp - Statistics: Num rows: 95 Data size: 81448 Basic stats: COMPLETE Column stats: PARTIAL + Statistics: Num rows: 97 Data size: 82932 Basic stats: COMPLETE Column stats: PARTIAL Filter Operator predicate: (key = 'foo') (type: boolean) Statistics: Num rows: 5 Data size: 1860 Basic stats: COMPLETE Column stats: PARTIAL diff --git a/ql/src/test/results/clientpositive/llap/enforce_constraint_notnull.q.out b/ql/src/test/results/clientpositive/llap/enforce_constraint_notnull.q.out index 5a9d263..84477c3 100644 --- a/ql/src/test/results/clientpositive/llap/enforce_constraint_notnull.q.out +++ b/ql/src/test/results/clientpositive/llap/enforce_constraint_notnull.q.out @@ -3233,19 +3233,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: acid_uami_n1 - Statistics: Num rows: 262 Data size: 82000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 267 Data size: 83640 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (((de = 109.23) or (de = 119.23)) and enforce_constraint(vc is not null)) (type: boolean) - Statistics: Num rows: 5 Data size: 1564 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1566 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ROW__ID (type: struct), i (type: int), vc (type: varchar(128)) outputColumnNames: _col0, _col1, _col3 - Statistics: Num rows: 5 Data size: 1564 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1566 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: struct) sort order: + Map-reduce partition columns: UDFToInteger(_col0) (type: int) - Statistics: Num rows: 5 Data size: 1564 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1566 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: int), _col3 (type: varchar(128)) Execution mode: vectorized, llap LLAP IO: may be used (ACID table) @@ -3255,10 +3255,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: struct), VALUE._col0 (type: int), 3.14 (type: decimal(5,2)), VALUE._col1 (type: varchar(128)) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 5 Data size: 1564 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1566 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 5 Data size: 1564 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1566 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat @@ -3326,7 +3326,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: acid_uami_n1 - Statistics: Num rows: 300 Data size: 93808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 305 Data size: 95448 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: 
((de = 3.14) and enforce_constraint((i is not null and vc is not null))) (type: boolean) Statistics: Num rows: 2 Data size: 625 Basic stats: COMPLETE Column stats: NONE diff --git a/ql/src/test/results/clientpositive/llap/extrapolate_part_stats_partial_ndv.q.out b/ql/src/test/results/clientpositive/llap/extrapolate_part_stats_partial_ndv.q.out index 8a0da7d..67eabcb 100644 --- a/ql/src/test/results/clientpositive/llap/extrapolate_part_stats_partial_ndv.q.out +++ b/ql/src/test/results/clientpositive/llap/extrapolate_part_stats_partial_ndv.q.out @@ -285,7 +285,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d_n0 { string state, double locid, decimal(10,0) cnt, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 531 + totalSize 545 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -331,7 +331,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d_n0 { string state, double locid, decimal(10,0) cnt, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 562 + totalSize 576 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -377,7 +377,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d_n0 { string state, double locid, decimal(10,0) cnt, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 580 + totalSize 595 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -423,7 +423,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d_n0 { string state, double locid, decimal(10,0) cnt, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 602 + totalSize 612 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -662,7 +662,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d_n0 { string state, double locid, decimal(10,0) cnt, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 531 + totalSize 545 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -708,7 +708,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d_n0 { string state, double locid, decimal(10,0) cnt, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 562 + totalSize 576 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -754,7 +754,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d_n0 { string state, double locid, decimal(10,0) cnt, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 580 + totalSize 595 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -800,7 +800,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d_n0 { string state, double locid, decimal(10,0) cnt, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 602 + totalSize 612 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1075,7 +1075,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n0 { string state, i32 locid, decimal(10,0) cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 386 + totalSize 402 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1122,7 +1122,7 @@ STAGE PLANS: 
serialization.ddl struct loc_orc_2d_n0 { string state, i32 locid, decimal(10,0) cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 409 + totalSize 424 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1169,7 +1169,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n0 { string state, i32 locid, decimal(10,0) cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 423 + totalSize 432 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1216,7 +1216,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n0 { string state, i32 locid, decimal(10,0) cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 383 + totalSize 401 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1263,7 +1263,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n0 { string state, i32 locid, decimal(10,0) cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 394 + totalSize 405 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1310,7 +1310,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n0 { string state, i32 locid, decimal(10,0) cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 387 + totalSize 405 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1357,7 +1357,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n0 { string state, i32 locid, decimal(10,0) cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 409 + totalSize 424 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1404,7 +1404,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n0 { string state, i32 locid, decimal(10,0) cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 366 + totalSize 380 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1451,7 +1451,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n0 { string state, i32 locid, decimal(10,0) cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 361 + totalSize 375 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1498,7 +1498,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n0 { string state, i32 locid, decimal(10,0) cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 412 + totalSize 424 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1545,7 +1545,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d_n0 { string state, i32 locid, decimal(10,0) cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 412 + totalSize 424 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde diff --git a/ql/src/test/results/clientpositive/llap/insert_values_orig_table_use_metadata.q.out b/ql/src/test/results/clientpositive/llap/insert_values_orig_table_use_metadata.q.out index b0089ef..d61917d 100644 --- a/ql/src/test/results/clientpositive/llap/insert_values_orig_table_use_metadata.q.out +++ 
b/ql/src/test/results/clientpositive/llap/insert_values_orig_table_use_metadata.q.out @@ -170,7 +170,7 @@ Table Type: MANAGED_TABLE Table Parameters: bucketing_version 2 numFiles 1 - totalSize 295436 + totalSize 295544 transactional true transactional_properties default #### A masked pattern was here #### @@ -205,9 +205,9 @@ STAGE PLANS: Map Operator Tree: TableScan alias: acid_ivot - Statistics: Num rows: 5861 Data size: 2954360 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 5863 Data size: 2955440 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - Statistics: Num rows: 5861 Data size: 2954360 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 5863 Data size: 2955440 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: count() mode: hash @@ -376,7 +376,7 @@ Table Type: MANAGED_TABLE Table Parameters: bucketing_version 2 numFiles 1 - totalSize 1572 + totalSize 1625 transactional true transactional_properties default #### A masked pattern was here #### @@ -411,9 +411,9 @@ STAGE PLANS: Map Operator Tree: TableScan alias: acid_ivot - Statistics: Num rows: 31 Data size: 15720 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 32 Data size: 16250 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - Statistics: Num rows: 31 Data size: 15720 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 32 Data size: 16250 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: count() mode: hash @@ -509,7 +509,7 @@ Table Type: MANAGED_TABLE Table Parameters: bucketing_version 2 numFiles 2 - totalSize 3146 + totalSize 3250 transactional true transactional_properties default #### A masked pattern was here #### @@ -544,9 +544,9 @@ STAGE PLANS: Map Operator Tree: TableScan alias: acid_ivot - Statistics: Num rows: 62 Data size: 31460 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 64 Data size: 32500 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - Statistics: Num rows: 62 Data size: 31460 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 64 Data size: 32500 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: count() mode: hash @@ -638,7 +638,7 @@ Table Type: MANAGED_TABLE Table Parameters: bucketing_version 2 numFiles 3 - totalSize 298582 + totalSize 298795 transactional true transactional_properties default #### A masked pattern was here #### @@ -673,9 +673,9 @@ STAGE PLANS: Map Operator Tree: TableScan alias: acid_ivot - Statistics: Num rows: 5924 Data size: 2985820 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 5928 Data size: 2987950 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - Statistics: Num rows: 5924 Data size: 2985820 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 5928 Data size: 2987950 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: count() mode: hash diff --git a/ql/src/test/results/clientpositive/llap/llap_acid2.q.out b/ql/src/test/results/clientpositive/llap/llap_acid2.q.out index 4d74a17..c3e9c2a 100644 --- a/ql/src/test/results/clientpositive/llap/llap_acid2.q.out +++ b/ql/src/test/results/clientpositive/llap/llap_acid2.q.out @@ -16,8 +16,10 @@ PREHOOK: query: CREATE TABLE orc_llap_n2 ( cfloat1 FLOAT, cdouble1 DOUBLE, cstring1 string, - cfloat2 float -) stored as orc TBLPROPERTIES ('transactional'='true') + cfloat2 float, + cdecimal1 decimal(10,3), + cdecimal2 decimal(38,10) +) stored as 
orc TBLPROPERTIES ('transactional'='true','orc.write.format'='UNSTABLE-PRE-2.0') PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@orc_llap_n2 @@ -35,8 +37,10 @@ POSTHOOK: query: CREATE TABLE orc_llap_n2 ( cfloat1 FLOAT, cdouble1 DOUBLE, cstring1 string, - cfloat2 float -) stored as orc TBLPROPERTIES ('transactional'='true') + cfloat2 float, + cdecimal1 decimal(10,3), + cdecimal2 decimal(38,10) +) stored as orc TBLPROPERTIES ('transactional'='true','orc.write.format'='UNSTABLE-PRE-2.0') POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orc_llap_n2 @@ -44,7 +48,8 @@ PREHOOK: query: insert into table orc_llap_n2 select cint, cbigint, cfloat, cdouble, cint as c1, cbigint as c2, cfloat as c3, cdouble as c4, cint as c8, cbigint as c7, cfloat as c6, cdouble as c5, - cstring1, cfloat as c9 from alltypesorc order by cdouble asc limit 30 + cstring1, cfloat as c9, cast("1.123" as decimal(10,3))as c10, + cast("1.123456789" as decimal(38,18)) as c11 from alltypesorc order by cdouble asc limit 30 PREHOOK: type: QUERY PREHOOK: Input: default@alltypesorc PREHOOK: Output: default@orc_llap_n2 @@ -52,13 +57,58 @@ POSTHOOK: query: insert into table orc_llap_n2 select cint, cbigint, cfloat, cdouble, cint as c1, cbigint as c2, cfloat as c3, cdouble as c4, cint as c8, cbigint as c7, cfloat as c6, cdouble as c5, - cstring1, cfloat as c9 from alltypesorc order by cdouble asc limit 30 + cstring1, cfloat as c9, cast("1.123" as decimal(10,3))as c10, + cast("1.123456789" as decimal(38,18)) as c11 from alltypesorc order by cdouble asc limit 30 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc POSTHOOK: Output: default@orc_llap_n2 POSTHOOK: Lineage: orc_llap_n2.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] POSTHOOK: Lineage: orc_llap_n2.cbigint0 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] POSTHOOK: Lineage: orc_llap_n2.cbigint1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cdecimal1 SIMPLE [] +POSTHOOK: Lineage: orc_llap_n2.cdecimal2 EXPRESSION [] +POSTHOOK: Lineage: orc_llap_n2.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cdouble0 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cdouble1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cfloat0 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cfloat1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cfloat2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cint0 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cint1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cstring1 SIMPLE 
[(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +PREHOOK: query: alter table orc_llap_n2 set TBLPROPERTIES ('transactional'='true','orc.write.format'='0.12') +PREHOOK: type: ALTERTABLE_PROPERTIES +PREHOOK: Input: default@orc_llap_n2 +PREHOOK: Output: default@orc_llap_n2 +POSTHOOK: query: alter table orc_llap_n2 set TBLPROPERTIES ('transactional'='true','orc.write.format'='0.12') +POSTHOOK: type: ALTERTABLE_PROPERTIES +POSTHOOK: Input: default@orc_llap_n2 +POSTHOOK: Output: default@orc_llap_n2 +PREHOOK: query: insert into table orc_llap_n2 +select cint, cbigint, cfloat, cdouble, + cint as c1, cbigint as c2, cfloat as c3, cdouble as c4, + cint as c8, cbigint as c7, cfloat as c6, cdouble as c5, + cstring1, cfloat as c9, cast("3.321" as decimal(10,3))as c10, + cast("9.987654321" as decimal(38,18)) as c11 from alltypesorc order by cdouble asc limit 30 +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: default@orc_llap_n2 +POSTHOOK: query: insert into table orc_llap_n2 +select cint, cbigint, cfloat, cdouble, + cint as c1, cbigint as c2, cfloat as c3, cdouble as c4, + cint as c8, cbigint as c7, cfloat as c6, cdouble as c5, + cstring1, cfloat as c9, cast("3.321" as decimal(10,3))as c10, + cast("9.987654321" as decimal(38,18)) as c11 from alltypesorc order by cdouble asc limit 30 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: default@orc_llap_n2 +POSTHOOK: Lineage: orc_llap_n2.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cbigint0 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cbigint1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cdecimal1 SIMPLE [] +POSTHOOK: Lineage: orc_llap_n2.cdecimal2 EXPRESSION [] POSTHOOK: Lineage: orc_llap_n2.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] POSTHOOK: Lineage: orc_llap_n2.cdouble0 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] POSTHOOK: Lineage: orc_llap_n2.cdouble1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] @@ -84,8 +134,10 @@ PREHOOK: query: CREATE TABLE orc_llap2 ( cfloat1 FLOAT, cdouble1 DOUBLE, cstring1 string, - cfloat2 float -) stored as orc TBLPROPERTIES ('transactional'='false') + cfloat2 float, + cdecimal1 decimal(10,3), + cdecimal2 decimal(38,10) +) stored as orc TBLPROPERTIES ('transactional'='false', 'orc.write.format'='UNSTABLE-PRE-2.0') PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@orc_llap2 @@ -103,8 +155,10 @@ POSTHOOK: query: CREATE TABLE orc_llap2 ( cfloat1 FLOAT, cdouble1 DOUBLE, cstring1 string, - cfloat2 float -) stored as orc TBLPROPERTIES ('transactional'='false') + cfloat2 float, + cdecimal1 decimal(10,3), + cdecimal2 decimal(38,10) +) stored as orc TBLPROPERTIES ('transactional'='false', 'orc.write.format'='UNSTABLE-PRE-2.0') POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orc_llap2 @@ -112,7 +166,8 @@ PREHOOK: query: insert into table orc_llap2 select cint, cbigint, cfloat, cdouble, cint as c1, cbigint as c2, cfloat as c3, cdouble as c4, cint as c8, cbigint as c7, cfloat as c6, cdouble as c5, - cstring1, cfloat as c9 from alltypesorc order by cdouble asc limit 30 + cstring1, cfloat as c9, 
cast("1.123" as decimal(10,3))as c10, + cast("1.123456789" as decimal(38,18)) as c11 from alltypesorc order by cdouble asc limit 30 PREHOOK: type: QUERY PREHOOK: Input: default@alltypesorc PREHOOK: Output: default@orc_llap2 @@ -120,13 +175,16 @@ POSTHOOK: query: insert into table orc_llap2 select cint, cbigint, cfloat, cdouble, cint as c1, cbigint as c2, cfloat as c3, cdouble as c4, cint as c8, cbigint as c7, cfloat as c6, cdouble as c5, - cstring1, cfloat as c9 from alltypesorc order by cdouble asc limit 30 + cstring1, cfloat as c9, cast("1.123" as decimal(10,3))as c10, + cast("1.123456789" as decimal(38,18)) as c11 from alltypesorc order by cdouble asc limit 30 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc POSTHOOK: Output: default@orc_llap2 POSTHOOK: Lineage: orc_llap2.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] POSTHOOK: Lineage: orc_llap2.cbigint0 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] POSTHOOK: Lineage: orc_llap2.cbigint1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: orc_llap2.cdecimal1 SIMPLE [] +POSTHOOK: Lineage: orc_llap2.cdecimal2 EXPRESSION [] POSTHOOK: Lineage: orc_llap2.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] POSTHOOK: Lineage: orc_llap2.cdouble0 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] POSTHOOK: Lineage: orc_llap2.cdouble1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] @@ -138,19 +196,21 @@ POSTHOOK: Lineage: orc_llap2.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(n POSTHOOK: Lineage: orc_llap2.cint0 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] POSTHOOK: Lineage: orc_llap2.cint1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] POSTHOOK: Lineage: orc_llap2.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] -PREHOOK: query: alter table orc_llap2 set TBLPROPERTIES ('transactional'='true') +PREHOOK: query: alter table orc_llap2 set TBLPROPERTIES ('transactional'='true','orc.write.format'='0.12') PREHOOK: type: ALTERTABLE_PROPERTIES PREHOOK: Input: default@orc_llap2 PREHOOK: Output: default@orc_llap2 -POSTHOOK: query: alter table orc_llap2 set TBLPROPERTIES ('transactional'='true') +POSTHOOK: query: alter table orc_llap2 set TBLPROPERTIES ('transactional'='true','orc.write.format'='0.12') POSTHOOK: type: ALTERTABLE_PROPERTIES POSTHOOK: Input: default@orc_llap2 POSTHOOK: Output: default@orc_llap2 -PREHOOK: query: update orc_llap2 set cstring1 = 'testvalue' where cstring1 = 'N016jPED08o' +PREHOOK: query: update orc_llap2 set cstring1 = 'testvalue', cdecimal1 = cast("3.321" as decimal(10,3)), +cdecimal2 = cast("9.987654321" as decimal(38,18)) where cstring1 = 'N016jPED08o' PREHOOK: type: QUERY PREHOOK: Input: default@orc_llap2 PREHOOK: Output: default@orc_llap2 -POSTHOOK: query: update orc_llap2 set cstring1 = 'testvalue' where cstring1 = 'N016jPED08o' +POSTHOOK: query: update orc_llap2 set cstring1 = 'testvalue', cdecimal1 = cast("3.321" as decimal(10,3)), +cdecimal2 = cast("9.987654321" as decimal(38,18)) where cstring1 = 'N016jPED08o' POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_llap2 POSTHOOK: Output: default@orc_llap2 @@ -192,6 +252,36 @@ xTlDv24JYv4s 7wH3hBKdO55Xq3gEEe0 5QLs0LVK1g ET3d4F2I4lV +N016jPED08o +Q1JAdUlCVORmR0Q5X5Vf5u6 
+eNsh5tYa +5j7GJ8OCXgMVIcK7 +uJGHsW3cd073NGFITyQ +G1u0pUmU6ehCm +mk6lShdOa8kXT8i7mLd3fK +u5C7glqT5XqtO0JE2686lk1 +h4omSc1jcLLwW +tFY2ng51v +vmAT10eeE47fgH20pLi +uN803aW +qqbDw46IgGds4 +32v414p63Jv1B4tO1xy +73xdw4X +d3o1712a03n20qvi62U7 +eQ80MW0h728I204P87YXc +KHtD2A2hp6OjFgS73gdgE +nI30tm7U55O0gI +LSJtFA66 +mby00c +meGb5 +pM6Gt05s1YJeii +LR2AKy0dPt8vFdIV5760jriw +1B3WMD5LSk65B2Moa +xTlDv24JYv4s +28Oe6r21yux7Lk47 +7wH3hBKdO55Xq3gEEe0 +5QLs0LVK1g +ET3d4F2I4lV PREHOOK: query: select cfloat2, cint from orc_llap_n2 PREHOOK: type: QUERY PREHOOK: Input: default@orc_llap_n2 @@ -230,6 +320,36 @@ NULL -899422227 11.0 385623629 11.0 681126962 11.0 25892751 +NULL -838810013 +NULL 246423894 +NULL 708885482 +NULL 186967185 +NULL -595277064 +NULL 584923170 +NULL 518213127 +NULL -334595454 +NULL 241008004 +NULL 185212032 +NULL -738747840 +NULL -971543377 +NULL 940448896 +NULL -324030556 +NULL -899422227 +11.0 835111400 +11.0 -775326158 +11.0 653630202 +11.0 779427499 +11.0 797003983 +11.0 31832752 +11.0 783790031 +11.0 -898241885 +11.0 NULL +11.0 -646295381 +11.0 130912195 +11.0 -391573084 +11.0 385623629 +11.0 681126962 +11.0 25892751 PREHOOK: query: select * from orc_llap_n2 PREHOOK: type: QUERY PREHOOK: Input: default@orc_llap_n2 @@ -238,36 +358,66 @@ POSTHOOK: query: select * from orc_llap_n2 POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_llap_n2 #### A masked pattern was here #### --838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL N016jPED08o NULL -246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL Q1JAdUlCVORmR0Q5X5Vf5u6 NULL -708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL eNsh5tYa NULL -186967185 -1645852809 NULL NULL 186967185 -1645852809 NULL NULL 186967185 -1645852809 NULL NULL 5j7GJ8OCXgMVIcK7 NULL --595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL uJGHsW3cd073NGFITyQ NULL -584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL G1u0pUmU6ehCm NULL -518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL mk6lShdOa8kXT8i7mLd3fK NULL --334595454 -1645852809 NULL NULL -334595454 -1645852809 NULL NULL -334595454 -1645852809 NULL NULL u5C7glqT5XqtO0JE2686lk1 NULL -241008004 -1645852809 NULL NULL 241008004 -1645852809 NULL NULL 241008004 -1645852809 NULL NULL h4omSc1jcLLwW NULL -185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL tFY2ng51v NULL --738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL vmAT10eeE47fgH20pLi NULL --971543377 -1645852809 NULL NULL -971543377 -1645852809 NULL NULL -971543377 -1645852809 NULL NULL uN803aW NULL -940448896 -1645852809 NULL NULL 940448896 -1645852809 NULL NULL 940448896 -1645852809 NULL NULL qqbDw46IgGds4 NULL --324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL 32v414p63Jv1B4tO1xy NULL --899422227 -1645852809 NULL NULL -899422227 -1645852809 NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X NULL -835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL d3o1712a03n20qvi62U7 11.0 --775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL eQ80MW0h728I204P87YXc 11.0 -653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL KHtD2A2hp6OjFgS73gdgE 11.0 
-779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL nI30tm7U55O0gI 11.0 -797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL LSJtFA66 11.0 -31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL mby00c 11.0 -783790031 -1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL meGb5 11.0 --898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL pM6Gt05s1YJeii 11.0 -NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL LR2AKy0dPt8vFdIV5760jriw 11.0 --646295381 -1654635859 11.0 NULL -646295381 -1654635859 11.0 NULL -646295381 -1654635859 11.0 NULL 1B3WMD5LSk65B2Moa 11.0 -130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL xTlDv24JYv4s 11.0 --391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL 28Oe6r21yux7Lk47 11.0 -385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 7wH3hBKdO55Xq3gEEe0 11.0 -681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 5QLs0LVK1g 11.0 -25892751 -1978674520 11.0 NULL 25892751 -1978674520 11.0 NULL 25892751 -1978674520 11.0 NULL ET3d4F2I4lV 11.0 +-838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL N016jPED08o NULL 3.321 9.9876543210 +246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL Q1JAdUlCVORmR0Q5X5Vf5u6 NULL 3.321 9.9876543210 +708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL eNsh5tYa NULL 3.321 9.9876543210 +186967185 -1645852809 NULL NULL 186967185 -1645852809 NULL NULL 186967185 -1645852809 NULL NULL 5j7GJ8OCXgMVIcK7 NULL 3.321 9.9876543210 +-595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL uJGHsW3cd073NGFITyQ NULL 3.321 9.9876543210 +584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL G1u0pUmU6ehCm NULL 3.321 9.9876543210 +518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL mk6lShdOa8kXT8i7mLd3fK NULL 3.321 9.9876543210 +-334595454 -1645852809 NULL NULL -334595454 -1645852809 NULL NULL -334595454 -1645852809 NULL NULL u5C7glqT5XqtO0JE2686lk1 NULL 3.321 9.9876543210 +241008004 -1645852809 NULL NULL 241008004 -1645852809 NULL NULL 241008004 -1645852809 NULL NULL h4omSc1jcLLwW NULL 3.321 9.9876543210 +185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL tFY2ng51v NULL 3.321 9.9876543210 +-738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL vmAT10eeE47fgH20pLi NULL 3.321 9.9876543210 +-971543377 -1645852809 NULL NULL -971543377 -1645852809 NULL NULL -971543377 -1645852809 NULL NULL uN803aW NULL 3.321 9.9876543210 +940448896 -1645852809 NULL NULL 940448896 -1645852809 NULL NULL 940448896 -1645852809 NULL NULL qqbDw46IgGds4 NULL 3.321 9.9876543210 +-324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL 32v414p63Jv1B4tO1xy NULL 3.321 9.9876543210 +-899422227 -1645852809 NULL NULL -899422227 -1645852809 NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X NULL 3.321 9.9876543210 +835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL d3o1712a03n20qvi62U7 11.0 3.321 
9.9876543210 +-775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL eQ80MW0h728I204P87YXc 11.0 3.321 9.9876543210 +653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL KHtD2A2hp6OjFgS73gdgE 11.0 3.321 9.9876543210 +779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL nI30tm7U55O0gI 11.0 3.321 9.9876543210 +797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL LSJtFA66 11.0 3.321 9.9876543210 +31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL mby00c 11.0 3.321 9.9876543210 +783790031 -1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL meGb5 11.0 3.321 9.9876543210 +-898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL pM6Gt05s1YJeii 11.0 3.321 9.9876543210 +NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL LR2AKy0dPt8vFdIV5760jriw 11.0 3.321 9.9876543210 +-646295381 -1654635859 11.0 NULL -646295381 -1654635859 11.0 NULL -646295381 -1654635859 11.0 NULL 1B3WMD5LSk65B2Moa 11.0 3.321 9.9876543210 +130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL xTlDv24JYv4s 11.0 3.321 9.9876543210 +-391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL 28Oe6r21yux7Lk47 11.0 3.321 9.9876543210 +385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 7wH3hBKdO55Xq3gEEe0 11.0 3.321 9.9876543210 +681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 5QLs0LVK1g 11.0 3.321 9.9876543210 +25892751 -1978674520 11.0 NULL 25892751 -1978674520 11.0 NULL 25892751 -1978674520 11.0 NULL ET3d4F2I4lV 11.0 3.321 9.9876543210 +-838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL N016jPED08o NULL 1.123 1.1234567890 +246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL Q1JAdUlCVORmR0Q5X5Vf5u6 NULL 1.123 1.1234567890 +708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL eNsh5tYa NULL 1.123 1.1234567890 +186967185 -1645852809 NULL NULL 186967185 -1645852809 NULL NULL 186967185 -1645852809 NULL NULL 5j7GJ8OCXgMVIcK7 NULL 1.123 1.1234567890 +-595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL uJGHsW3cd073NGFITyQ NULL 1.123 1.1234567890 +584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL G1u0pUmU6ehCm NULL 1.123 1.1234567890 +518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL mk6lShdOa8kXT8i7mLd3fK NULL 1.123 1.1234567890 +-334595454 -1645852809 NULL NULL -334595454 -1645852809 NULL NULL -334595454 -1645852809 NULL NULL u5C7glqT5XqtO0JE2686lk1 NULL 1.123 1.1234567890 +241008004 -1645852809 NULL NULL 241008004 -1645852809 NULL NULL 241008004 -1645852809 NULL NULL h4omSc1jcLLwW NULL 1.123 1.1234567890 +185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL tFY2ng51v NULL 1.123 1.1234567890 +-738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL vmAT10eeE47fgH20pLi NULL 1.123 1.1234567890 +-971543377 -1645852809 NULL NULL -971543377 -1645852809 NULL NULL -971543377 -1645852809 NULL NULL uN803aW NULL 1.123 1.1234567890 +940448896 
-1645852809 NULL NULL 940448896 -1645852809 NULL NULL 940448896 -1645852809 NULL NULL qqbDw46IgGds4 NULL 1.123 1.1234567890 +-324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL 32v414p63Jv1B4tO1xy NULL 1.123 1.1234567890 +-899422227 -1645852809 NULL NULL -899422227 -1645852809 NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X NULL 1.123 1.1234567890 +835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL d3o1712a03n20qvi62U7 11.0 1.123 1.1234567890 +-775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL eQ80MW0h728I204P87YXc 11.0 1.123 1.1234567890 +653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL KHtD2A2hp6OjFgS73gdgE 11.0 1.123 1.1234567890 +779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL nI30tm7U55O0gI 11.0 1.123 1.1234567890 +797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL LSJtFA66 11.0 1.123 1.1234567890 +31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL mby00c 11.0 1.123 1.1234567890 +783790031 -1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL meGb5 11.0 1.123 1.1234567890 +-898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL pM6Gt05s1YJeii 11.0 1.123 1.1234567890 +NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL LR2AKy0dPt8vFdIV5760jriw 11.0 1.123 1.1234567890 +-646295381 -1654635859 11.0 NULL -646295381 -1654635859 11.0 NULL -646295381 -1654635859 11.0 NULL 1B3WMD5LSk65B2Moa 11.0 1.123 1.1234567890 +130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL xTlDv24JYv4s 11.0 1.123 1.1234567890 +-391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL 28Oe6r21yux7Lk47 11.0 1.123 1.1234567890 +385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 7wH3hBKdO55Xq3gEEe0 11.0 1.123 1.1234567890 +681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 5QLs0LVK1g 11.0 1.123 1.1234567890 +25892751 -1978674520 11.0 NULL 25892751 -1978674520 11.0 NULL 25892751 -1978674520 11.0 NULL ET3d4F2I4lV 11.0 1.123 1.1234567890 PREHOOK: query: select cstring1 from orc_llap2 PREHOOK: type: QUERY PREHOOK: Input: default@orc_llap2 @@ -352,36 +502,36 @@ POSTHOOK: query: select * from orc_llap2 POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_llap2 #### A masked pattern was here #### -246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL Q1JAdUlCVORmR0Q5X5Vf5u6 NULL -708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL eNsh5tYa NULL -186967185 -1645852809 NULL NULL 186967185 -1645852809 NULL NULL 186967185 -1645852809 NULL NULL 5j7GJ8OCXgMVIcK7 NULL --595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL uJGHsW3cd073NGFITyQ NULL -584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL G1u0pUmU6ehCm NULL -518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL mk6lShdOa8kXT8i7mLd3fK NULL --334595454 -1645852809 NULL NULL -334595454 -1645852809 NULL NULL -334595454 -1645852809 NULL NULL u5C7glqT5XqtO0JE2686lk1 NULL -241008004 -1645852809 NULL NULL 
241008004 -1645852809 NULL NULL 241008004 -1645852809 NULL NULL h4omSc1jcLLwW NULL -185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL tFY2ng51v NULL --738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL vmAT10eeE47fgH20pLi NULL --971543377 -1645852809 NULL NULL -971543377 -1645852809 NULL NULL -971543377 -1645852809 NULL NULL uN803aW NULL -940448896 -1645852809 NULL NULL 940448896 -1645852809 NULL NULL 940448896 -1645852809 NULL NULL qqbDw46IgGds4 NULL --324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL 32v414p63Jv1B4tO1xy NULL --899422227 -1645852809 NULL NULL -899422227 -1645852809 NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X NULL -835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL d3o1712a03n20qvi62U7 11.0 --775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL eQ80MW0h728I204P87YXc 11.0 -653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL KHtD2A2hp6OjFgS73gdgE 11.0 -779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL nI30tm7U55O0gI 11.0 -797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL LSJtFA66 11.0 -31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL mby00c 11.0 -783790031 -1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL meGb5 11.0 --898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL pM6Gt05s1YJeii 11.0 -NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL LR2AKy0dPt8vFdIV5760jriw 11.0 --646295381 -1654635859 11.0 NULL -646295381 -1654635859 11.0 NULL -646295381 -1654635859 11.0 NULL 1B3WMD5LSk65B2Moa 11.0 -130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL xTlDv24JYv4s 11.0 --391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL 28Oe6r21yux7Lk47 11.0 -385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 7wH3hBKdO55Xq3gEEe0 11.0 -681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 5QLs0LVK1g 11.0 -25892751 -1978674520 11.0 NULL 25892751 -1978674520 11.0 NULL 25892751 -1978674520 11.0 NULL ET3d4F2I4lV 11.0 --838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL testvalue NULL +246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL Q1JAdUlCVORmR0Q5X5Vf5u6 NULL 1.123 1.1234567890 +708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL eNsh5tYa NULL 1.123 1.1234567890 +186967185 -1645852809 NULL NULL 186967185 -1645852809 NULL NULL 186967185 -1645852809 NULL NULL 5j7GJ8OCXgMVIcK7 NULL 1.123 1.1234567890 +-595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL uJGHsW3cd073NGFITyQ NULL 1.123 1.1234567890 +584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL G1u0pUmU6ehCm NULL 1.123 1.1234567890 +518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL mk6lShdOa8kXT8i7mLd3fK NULL 1.123 1.1234567890 +-334595454 -1645852809 NULL NULL -334595454 -1645852809 NULL NULL -334595454 -1645852809 NULL 
NULL u5C7glqT5XqtO0JE2686lk1 NULL 1.123 1.1234567890 +241008004 -1645852809 NULL NULL 241008004 -1645852809 NULL NULL 241008004 -1645852809 NULL NULL h4omSc1jcLLwW NULL 1.123 1.1234567890 +185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL tFY2ng51v NULL 1.123 1.1234567890 +-738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL vmAT10eeE47fgH20pLi NULL 1.123 1.1234567890 +-971543377 -1645852809 NULL NULL -971543377 -1645852809 NULL NULL -971543377 -1645852809 NULL NULL uN803aW NULL 1.123 1.1234567890 +940448896 -1645852809 NULL NULL 940448896 -1645852809 NULL NULL 940448896 -1645852809 NULL NULL qqbDw46IgGds4 NULL 1.123 1.1234567890 +-324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL 32v414p63Jv1B4tO1xy NULL 1.123 1.1234567890 +-899422227 -1645852809 NULL NULL -899422227 -1645852809 NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X NULL 1.123 1.1234567890 +835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL d3o1712a03n20qvi62U7 11.0 1.123 1.1234567890 +-775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL eQ80MW0h728I204P87YXc 11.0 1.123 1.1234567890 +653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL KHtD2A2hp6OjFgS73gdgE 11.0 1.123 1.1234567890 +779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL nI30tm7U55O0gI 11.0 1.123 1.1234567890 +797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL LSJtFA66 11.0 1.123 1.1234567890 +31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL mby00c 11.0 1.123 1.1234567890 +783790031 -1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL meGb5 11.0 1.123 1.1234567890 +-898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL pM6Gt05s1YJeii 11.0 1.123 1.1234567890 +NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL LR2AKy0dPt8vFdIV5760jriw 11.0 1.123 1.1234567890 +-646295381 -1654635859 11.0 NULL -646295381 -1654635859 11.0 NULL -646295381 -1654635859 11.0 NULL 1B3WMD5LSk65B2Moa 11.0 1.123 1.1234567890 +130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL xTlDv24JYv4s 11.0 1.123 1.1234567890 +-391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL 28Oe6r21yux7Lk47 11.0 1.123 1.1234567890 +385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 7wH3hBKdO55Xq3gEEe0 11.0 1.123 1.1234567890 +681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 5QLs0LVK1g 11.0 1.123 1.1234567890 +25892751 -1978674520 11.0 NULL 25892751 -1978674520 11.0 NULL 25892751 -1978674520 11.0 NULL ET3d4F2I4lV 11.0 1.123 1.1234567890 +-838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL testvalue NULL 3.321 9.9876543210 PREHOOK: query: DROP TABLE orc_llap_n2 PREHOOK: type: DROPTABLE PREHOOK: Input: default@orc_llap_n2 diff --git a/ql/src/test/results/clientpositive/llap/llap_decimal64_reader.q.out b/ql/src/test/results/clientpositive/llap/llap_decimal64_reader.q.out new file mode 100644 index 0000000..9569e9c --- /dev/null +++ b/ql/src/test/results/clientpositive/llap/llap_decimal64_reader.q.out @@ -0,0 +1,303 @@ 
+PREHOOK: query: DROP TABLE orc_llap_n0 +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE orc_llap_n0 +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE orc_llap_n0( + ctinyint TINYINT, + csmallint SMALLINT, + cint INT, + cbigint BIGINT, + cfloat FLOAT, + cdouble DOUBLE, + cstring1 STRING, + cstring2 STRING, + ctimestamp1 TIMESTAMP, + ctimestamp2 TIMESTAMP, + cboolean1 BOOLEAN, + cboolean2 BOOLEAN, + cdecimal1 decimal(10,2), + cdecimal2 decimal(38,5)) + STORED AS ORC tblproperties ("orc.compress"="NONE") +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@orc_llap_n0 +POSTHOOK: query: CREATE TABLE orc_llap_n0( + ctinyint TINYINT, + csmallint SMALLINT, + cint INT, + cbigint BIGINT, + cfloat FLOAT, + cdouble DOUBLE, + cstring1 STRING, + cstring2 STRING, + ctimestamp1 TIMESTAMP, + ctimestamp2 TIMESTAMP, + cboolean1 BOOLEAN, + cboolean2 BOOLEAN, + cdecimal1 decimal(10,2), + cdecimal2 decimal(38,5)) + STORED AS ORC tblproperties ("orc.compress"="NONE") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@orc_llap_n0 +PREHOOK: query: insert into table orc_llap_n0 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2, + cast("3.345" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: default@orc_llap_n0 +POSTHOOK: query: insert into table orc_llap_n0 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2, + cast("3.345" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: default@orc_llap_n0 +POSTHOOK: Lineage: orc_llap_n0.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cboolean1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cboolean2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cdecimal1 SIMPLE [] +POSTHOOK: Lineage: orc_llap_n0.cdecimal2 SIMPLE [] +POSTHOOK: Lineage: orc_llap_n0.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.csmallint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctimestamp1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctimestamp2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ] +PREHOOK: query: alter table 
orc_llap_n0 set tblproperties ("orc.compress"="NONE", 'orc.write.format'='UNSTABLE-PRE-2.0') +PREHOOK: type: ALTERTABLE_PROPERTIES +PREHOOK: Input: default@orc_llap_n0 +PREHOOK: Output: default@orc_llap_n0 +POSTHOOK: query: alter table orc_llap_n0 set tblproperties ("orc.compress"="NONE", 'orc.write.format'='UNSTABLE-PRE-2.0') +POSTHOOK: type: ALTERTABLE_PROPERTIES +POSTHOOK: Input: default@orc_llap_n0 +POSTHOOK: Output: default@orc_llap_n0 +PREHOOK: query: insert into table orc_llap_n0 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2, + cast("4.456" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: default@orc_llap_n0 +POSTHOOK: query: insert into table orc_llap_n0 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2, + cast("4.456" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: default@orc_llap_n0 +POSTHOOK: Lineage: orc_llap_n0.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cboolean1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cboolean2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cdecimal1 SIMPLE [] +POSTHOOK: Lineage: orc_llap_n0.cdecimal2 SIMPLE [] +POSTHOOK: Lineage: orc_llap_n0.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.csmallint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctimestamp1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctimestamp2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ] +PREHOOK: query: explain vectorization select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + 
Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: orc_llap_n0 + filterExpr: ((cdecimal1 = 3.35) or (cdecimal1 = 4.46)) (type: boolean) + Statistics: Num rows: 24576 Data size: 5505024 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: ((cdecimal1 = 3.35) or (cdecimal1 = 4.46)) (type: boolean) + Statistics: Num rows: 24576 Data size: 5505024 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + keys: cdecimal1 (type: decimal(10,2)), cdecimal2 (type: decimal(38,5)) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: decimal(10,2)), _col1 (type: decimal(38,5)) + sort order: ++ + Map-reduce partition columns: _col0 (type: decimal(10,2)), _col1 (type: decimal(38,5)) + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + TopN Hash Memory Usage: 0.1 + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + keys: KEY._col0 (type: decimal(10,2)), KEY._col1 (type: decimal(38,5)) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + Limit + Number of rows: 2 + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 2 + Processor Tree: + ListSink + +PREHOOK: query: select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2 +PREHOOK: type: QUERY +PREHOOK: Input: default@orc_llap_n0 +#### A masked pattern was here #### +POSTHOOK: query: select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@orc_llap_n0 +#### A masked pattern was here #### +4.46 5.56789 +3.35 5.56789 +PREHOOK: query: explain vectorization select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = 
cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: orc_llap_n0 + filterExpr: ((cdecimal1 = 3.35) or (cdecimal1 = 4.46)) (type: boolean) + Statistics: Num rows: 24576 Data size: 5505024 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: ((cdecimal1 = 3.35) or (cdecimal1 = 4.46)) (type: boolean) + Statistics: Num rows: 24576 Data size: 5505024 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + keys: cdecimal1 (type: decimal(10,2)), cdecimal2 (type: decimal(38,5)) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: decimal(10,2)), _col1 (type: decimal(38,5)) + sort order: ++ + Map-reduce partition columns: _col0 (type: decimal(10,2)), _col1 (type: decimal(38,5)) + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + TopN Hash Memory Usage: 0.1 + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + vectorizationSupportRemovedReasons: [DECIMAL_64 disabled as hive.exec.orc.use.decimal64.column.vectors is set to false] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + keys: KEY._col0 (type: decimal(10,2)), KEY._col1 (type: decimal(38,5)) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + Limit + Number of rows: 2 + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 2 + Processor Tree: + ListSink + +PREHOOK: query: select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2 +PREHOOK: type: QUERY +PREHOOK: Input: default@orc_llap_n0 +#### A masked pattern was here #### +POSTHOOK: query: select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@orc_llap_n0 +#### A masked pattern was 
here #### +4.46 5.56789 +3.35 5.56789 +PREHOOK: query: DROP TABLE orc_llap_n0 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@orc_llap_n0 +PREHOOK: Output: default@orc_llap_n0 +POSTHOOK: query: DROP TABLE orc_llap_n0 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@orc_llap_n0 +POSTHOOK: Output: default@orc_llap_n0 diff --git a/ql/src/test/results/clientpositive/llap/llap_nullscan.q.out b/ql/src/test/results/clientpositive/llap/llap_nullscan.q.out index 2393450..7bf003e 100644 --- a/ql/src/test/results/clientpositive/llap/llap_nullscan.q.out +++ b/ql/src/test/results/clientpositive/llap/llap_nullscan.q.out @@ -101,7 +101,7 @@ STAGE PLANS: serialization.ddl struct src_orc_n1 { string key, string value, string ds, string hr} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe - totalSize 626 + totalSize 644 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.NullStructSerDe @@ -124,7 +124,7 @@ STAGE PLANS: serialization.ddl struct src_orc_n1 { string key, string value, string ds, string hr} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 626 + totalSize 644 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.src_orc_n1 diff --git a/ql/src/test/results/clientpositive/llap/llap_partitioned.q.out b/ql/src/test/results/clientpositive/llap/llap_partitioned.q.out index d827add..e6fa1ac 100644 --- a/ql/src/test/results/clientpositive/llap/llap_partitioned.q.out +++ b/ql/src/test/results/clientpositive/llap/llap_partitioned.q.out @@ -1679,8 +1679,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1696,7 +1696,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: od - Statistics: Num rows: 10 Data size: 2640 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 10 Data size: 120 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true vectorizationSchemaColumns: [0:csmallint:smallint, 1:cint:int, 2:cbigint:bigint, 3:cfloat:float, 4:cdouble:double, 5:cstring1:string, 6:cchar1:char(255), 7:cvchar1:varchar(255), 8:cboolean1:boolean, 9:cboolean2:boolean, 10:ctinyint:tinyint, 11:ROW__ID:struct] @@ -1710,7 +1710,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [] - Statistics: Num rows: 10 Data size: 2640 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 10 Data size: 120 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: ctinyint (type: tinyint) outputColumnNames: _col0 @@ -1745,8 +1745,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2127,8 +2127,7 @@ STAGE PLANS: enabled: true 
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: true diff --git a/ql/src/test/results/clientpositive/llap/llap_text.q.out b/ql/src/test/results/clientpositive/llap/llap_text.q.out new file mode 100644 index 0000000..40d08d3 --- /dev/null +++ b/ql/src/test/results/clientpositive/llap/llap_text.q.out @@ -0,0 +1,1082 @@ +PREHOOK: query: DROP TABLE text_llap +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE text_llap +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE text_llap( + ctinyint TINYINT, + csmallint SMALLINT, + cint INT, + cbigint BIGINT, + cfloat FLOAT, + cdouble DOUBLE, + cstring1 STRING, + cstring2 STRING, + ctimestamp1 TIMESTAMP, + ctimestamp2 TIMESTAMP, + cboolean1 BOOLEAN, + cboolean2 BOOLEAN) +row format serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' +stored as inputformat "org.apache.hadoop.mapred.TextInputFormat" + + outputformat "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat" +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@text_llap +POSTHOOK: query: CREATE TABLE text_llap( + ctinyint TINYINT, + csmallint SMALLINT, + cint INT, + cbigint BIGINT, + cfloat FLOAT, + cdouble DOUBLE, + cstring1 STRING, + cstring2 STRING, + ctimestamp1 TIMESTAMP, + ctimestamp2 TIMESTAMP, + cboolean1 BOOLEAN, + cboolean2 BOOLEAN) +row format serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' +stored as inputformat "org.apache.hadoop.mapred.TextInputFormat" + + outputformat "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat" +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@text_llap +PREHOOK: query: insert into table text_llap +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2 from alltypesorc +where cboolean2 is not null or cstring1 is not null or ctinyint is not null +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: default@text_llap +POSTHOOK: query: insert into table text_llap +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2 from alltypesorc +where cboolean2 is not null or cstring1 is not null or ctinyint is not null +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: default@text_llap +POSTHOOK: Lineage: text_llap.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: text_llap.cboolean1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: text_llap.cboolean2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ] +POSTHOOK: Lineage: text_llap.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: text_llap.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: text_llap.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: text_llap.csmallint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, 
comment:null), ] +POSTHOOK: Lineage: text_llap.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: text_llap.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ] +POSTHOOK: Lineage: text_llap.ctimestamp1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: text_llap.ctimestamp2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ] +POSTHOOK: Lineage: text_llap.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ] +PREHOOK: query: create table text_llap2( + t tinyint, + si smallint, + i int, + b bigint, + f float, + d double, + bo boolean, + s string, + ts timestamp, + `dec` decimal, + bin binary) +row format delimited fields terminated by '|' +stored as inputformat "org.apache.hadoop.mapred.TextInputFormat" + +outputformat "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat" +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@text_llap2 +POSTHOOK: query: create table text_llap2( + t tinyint, + si smallint, + i int, + b bigint, + f float, + d double, + bo boolean, + s string, + ts timestamp, + `dec` decimal, + bin binary) +row format delimited fields terminated by '|' +stored as inputformat "org.apache.hadoop.mapred.TextInputFormat" + +outputformat "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat" +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@text_llap2 +PREHOOK: query: load data local inpath '../../data/files/over10k.gz' into table text_llap2 +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@text_llap2 +POSTHOOK: query: load data local inpath '../../data/files/over10k.gz' into table text_llap2 +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@text_llap2 +PREHOOK: query: create table text_llap1 like text_llap +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@text_llap1 +POSTHOOK: query: create table text_llap1 like text_llap +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@text_llap1 +PREHOOK: query: create table text_llap100 like text_llap +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@text_llap100 +POSTHOOK: query: create table text_llap100 like text_llap +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@text_llap100 +PREHOOK: query: create table text_llap1000 like text_llap +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@text_llap1000 +POSTHOOK: query: create table text_llap1000 like text_llap +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@text_llap1000 +PREHOOK: query: insert into table text_llap1 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2 from alltypesorc +where cboolean2 is not null or cstring1 is not null or ctinyint is not null limit 1 +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: default@text_llap1 +POSTHOOK: query: insert into table text_llap1 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2 from alltypesorc +where 
cboolean2 is not null or cstring1 is not null or ctinyint is not null limit 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: default@text_llap1 +POSTHOOK: Lineage: text_llap1.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: text_llap1.cboolean1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: text_llap1.cboolean2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ] +POSTHOOK: Lineage: text_llap1.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: text_llap1.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: text_llap1.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: text_llap1.csmallint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: text_llap1.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: text_llap1.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ] +POSTHOOK: Lineage: text_llap1.ctimestamp1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: text_llap1.ctimestamp2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ] +POSTHOOK: Lineage: text_llap1.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ] +PREHOOK: query: insert into table text_llap100 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2 from alltypesorc +where cboolean2 is not null or cstring1 is not null or ctinyint is not null limit 100 +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: default@text_llap100 +POSTHOOK: query: insert into table text_llap100 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2 from alltypesorc +where cboolean2 is not null or cstring1 is not null or ctinyint is not null limit 100 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: default@text_llap100 +POSTHOOK: Lineage: text_llap100.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: text_llap100.cboolean1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: text_llap100.cboolean2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ] +POSTHOOK: Lineage: text_llap100.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: text_llap100.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: text_llap100.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: text_llap100.csmallint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: text_llap100.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] 
+POSTHOOK: Lineage: text_llap100.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ] +POSTHOOK: Lineage: text_llap100.ctimestamp1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: text_llap100.ctimestamp2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ] +POSTHOOK: Lineage: text_llap100.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ] +PREHOOK: query: insert into table text_llap1000 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2 from alltypesorc +where cboolean2 is not null or cstring1 is not null or ctinyint is not null limit 1000 +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: default@text_llap1000 +POSTHOOK: query: insert into table text_llap1000 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2 from alltypesorc +where cboolean2 is not null or cstring1 is not null or ctinyint is not null limit 1000 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: default@text_llap1000 +POSTHOOK: Lineage: text_llap1000.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: text_llap1000.cboolean1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: text_llap1000.cboolean2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ] +POSTHOOK: Lineage: text_llap1000.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: text_llap1000.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: text_llap1000.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: text_llap1000.csmallint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: text_llap1000.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: text_llap1000.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ] +POSTHOOK: Lineage: text_llap1000.ctimestamp1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: text_llap1000.ctimestamp2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ] +POSTHOOK: Lineage: text_llap1000.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ] +PREHOOK: query: select t, s, ts from text_llap2 order by t, s, ts limit 100 +PREHOOK: type: QUERY +PREHOOK: Input: default@text_llap2 +#### A masked pattern was here #### +POSTHOOK: query: select t, s, ts from text_llap2 order by t, s, ts limit 100 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@text_llap2 +#### A masked pattern was here #### +-2 alice carson 2013-03-01 09:11:58.703074 +-2 alice nixon 2013-03-01 09:11:58.703321 +-2 alice underhill 2013-03-01 09:11:58.703122 +-2 alice underhill 2013-03-01 09:11:58.703127 +-2 alice xylophone 2013-03-01 09:11:58.703105 +-2 bob falkner 2013-03-01 09:11:58.703071 +-2 bob king 
2013-03-01 09:11:58.703236 +-2 bob ovid 2013-03-01 09:11:58.703285 +-2 bob van buren 2013-03-01 09:11:58.703218 +-2 bob xylophone 2013-03-01 09:11:58.703219 +-2 calvin xylophone 2013-03-01 09:11:58.703083 +-2 david falkner 2013-03-01 09:11:58.703254 +-2 david laertes 2013-03-01 09:11:58.703076 +-2 david miller 2013-03-01 09:11:58.703238 +-3 alice allen 2013-03-01 09:11:58.703323 +-3 alice davidson 2013-03-01 09:11:58.703226 +-3 alice falkner 2013-03-01 09:11:58.703304 +-3 alice king 2013-03-01 09:11:58.70314 +-3 alice king 2013-03-01 09:11:58.703247 +-3 alice xylophone 2013-03-01 09:11:58.703129 +-3 bob ellison 2013-03-01 09:11:58.703261 +-3 bob falkner 2013-03-01 09:11:58.70328 +-3 bob ichabod 2013-03-01 09:11:58.70324 +-3 bob johnson 2013-03-01 09:11:58.703204 +-3 bob polk 2013-03-01 09:11:58.703128 +-3 bob underhill 2013-03-01 09:11:58.703176 +-3 bob underhill 2013-03-01 09:11:58.703188 +-3 bob van buren 2013-03-01 09:11:58.703199 +-3 calvin ichabod 2013-03-01 09:11:58.703213 +-3 calvin white 2013-03-01 09:11:58.703295 +-3 david carson 2013-03-01 09:11:58.703136 +-3 david falkner 2013-03-01 09:11:58.703305 +-3 david garcia 2013-03-01 09:11:58.70319 +-3 david hernandez 2013-03-01 09:11:58.703252 +-3 ethan steinbeck 2013-03-01 09:11:58.703079 +-3 ethan underhill 2013-03-01 09:11:58.703138 +-3 fred ellison 2013-03-01 09:11:58.703233 +-3 gabriella brown 2013-03-01 09:11:58.703288 +-3 holly nixon 2013-03-01 09:11:58.703262 +-3 holly polk 2013-03-01 09:11:58.703273 +-3 holly steinbeck 2013-03-01 09:11:58.703242 +-3 holly thompson 2013-03-01 09:11:58.703073 +-3 holly underhill 2013-03-01 09:11:58.703219 +-3 irene ellison 2013-03-01 09:11:58.703092 +-3 irene underhill 2013-03-01 09:11:58.703298 +-3 irene young 2013-03-01 09:11:58.703084 +-3 jessica johnson 2013-03-01 09:11:58.703319 +-3 jessica king 2013-03-01 09:11:58.703279 +-3 jessica miller 2013-03-01 09:11:58.703245 +-3 jessica white 2013-03-01 09:11:58.703199 +-3 katie ichabod 2013-03-01 09:11:58.703139 +-3 luke garcia 2013-03-01 09:11:58.703076 +-3 luke ichabod 2013-03-01 09:11:58.703294 +-3 luke king 2013-03-01 09:11:58.703207 +-3 luke young 2013-03-01 09:11:58.703182 +-3 mike allen 2013-03-01 09:11:58.703292 +-3 mike king 2013-03-01 09:11:58.703214 +-3 mike polk 2013-03-01 09:11:58.70319 +-3 mike white 2013-03-01 09:11:58.703087 +-3 mike xylophone 2013-03-01 09:11:58.703308 +-3 nick nixon 2013-03-01 09:11:58.703083 +-3 nick robinson 2013-03-01 09:11:58.703147 +-3 oscar davidson 2013-03-01 09:11:58.703071 +-3 oscar garcia 2013-03-01 09:11:58.703282 +-3 oscar johnson 2013-03-01 09:11:58.70311 +-3 oscar johnson 2013-03-01 09:11:58.703133 +-3 oscar miller 2013-03-01 09:11:58.70332 +-3 priscilla laertes 2013-03-01 09:11:58.70325 +-3 priscilla quirinius 2013-03-01 09:11:58.703228 +-3 priscilla zipper 2013-03-01 09:11:58.703321 +-3 quinn ellison 2013-03-01 09:11:58.703232 +-3 quinn polk 2013-03-01 09:11:58.703244 +-3 rachel davidson 2013-03-01 09:11:58.703316 +-3 rachel thompson 2013-03-01 09:11:58.703276 +-3 sarah miller 2013-03-01 09:11:58.70316 +-3 sarah robinson 2013-03-01 09:11:58.703288 +-3 sarah xylophone 2013-03-01 09:11:58.703112 +-3 sarah zipper 2013-03-01 09:11:58.703289 +-3 tom hernandez 2013-03-01 09:11:58.703108 +-3 tom hernandez 2013-03-01 09:11:58.703188 +-3 tom polk 2013-03-01 09:11:58.703217 +-3 tom steinbeck 2013-03-01 09:11:58.703251 +-3 ulysses carson 2013-03-01 09:11:58.703253 +-3 ulysses ellison 2013-03-01 09:11:58.703197 +-3 ulysses quirinius 2013-03-01 09:11:58.703189 +-3 ulysses robinson 2013-03-01 09:11:58.703227 
+-3 ulysses steinbeck 2013-03-01 09:11:58.703259 +-3 victor allen 2013-03-01 09:11:58.703155 +-3 victor hernandez 2013-03-01 09:11:58.703176 +-3 victor robinson 2013-03-01 09:11:58.703305 +-3 victor thompson 2013-03-01 09:11:58.703299 +-3 victor xylophone 2013-03-01 09:11:58.703135 +-3 wendy quirinius 2013-03-01 09:11:58.703266 +-3 wendy robinson 2013-03-01 09:11:58.703294 +-3 wendy xylophone 2013-03-01 09:11:58.703191 +-3 xavier garcia 2013-03-01 09:11:58.703194 +-3 xavier ovid 2013-03-01 09:11:58.703148 +-3 yuri xylophone 2013-03-01 09:11:58.703258 +-3 zach thompson 2013-03-01 09:11:58.703252 +-3 zach young 2013-03-01 09:11:58.703191 +PREHOOK: query: select * from text_llap2 order by t, s, ts limit 100 +PREHOOK: type: QUERY +PREHOOK: Input: default@text_llap2 +#### A masked pattern was here #### +POSTHOOK: query: select * from text_llap2 order by t, s, ts limit 100 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@text_llap2 +#### A masked pattern was here #### +-2 305 65767 4294967529 76.54 4.72 true calvin xylophone 2013-03-01 09:11:58.703083 69 quiet hour +-2 331 65707 4294967335 67.12 13.51 false bob ovid 2013-03-01 09:11:58.703285 62 joggying +-2 373 65548 4294967423 16.98 43.6 true alice nixon 2013-03-01 09:11:58.703321 53 debate +-2 378 65553 4294967461 9.81 10.36 true bob king 2013-03-01 09:11:58.703236 91 opthamology +-2 389 65706 4294967488 26.68 17.93 false alice underhill 2013-03-01 09:11:58.703122 87 forestry +-2 389 65738 4294967520 99.45 26.26 true bob falkner 2013-03-01 09:11:58.703071 17 nap time +-2 393 65715 4294967305 48.3 1.85 true alice xylophone 2013-03-01 09:11:58.703105 30 values clariffication +-2 406 65582 4294967311 20.94 35.74 false bob van buren 2013-03-01 09:11:58.703218 25 opthamology +-2 406 65762 4294967443 1.79 33.42 false david falkner 2013-03-01 09:11:58.703254 58 opthamology +-2 407 65612 4294967318 25.48 41.56 true david laertes 2013-03-01 09:11:58.703076 40 forestry +-2 427 65666 4294967465 19.69 33.24 true bob xylophone 2013-03-01 09:11:58.703219 33 joggying +-2 446 65790 4294967302 6.49 10.81 false alice underhill 2013-03-01 09:11:58.703127 44 undecided +-2 450 65727 4294967487 94.57 30.4 false david miller 2013-03-01 09:11:58.703238 40 religion +-2 473 65565 4294967320 87.78 12.26 true alice carson 2013-03-01 09:11:58.703074 90 xylophone band +-3 260 65595 4294967545 59.07 6.75 false bob falkner 2013-03-01 09:11:58.70328 37 chemistry +-3 264 65776 4294967398 20.95 5.97 false bob polk 2013-03-01 09:11:58.703128 93 joggying +-3 266 65736 4294967397 19.94 10.01 false quinn ellison 2013-03-01 09:11:58.703232 89 forestry +-3 268 65710 4294967448 82.74 12.48 true holly polk 2013-03-01 09:11:58.703273 15 undecided +-3 270 65702 4294967512 38.05 1.07 true david carson 2013-03-01 09:11:58.703136 28 philosophy +-3 275 65543 4294967522 74.92 17.29 false mike king 2013-03-01 09:11:58.703214 53 opthamology +-3 275 65575 4294967441 38.22 2.43 true sarah xylophone 2013-03-01 09:11:58.703112 93 wind surfing +-3 275 65622 4294967302 71.78 8.49 false wendy robinson 2013-03-01 09:11:58.703294 95 undecided +-3 279 65661 4294967536 25.5 0.02 false wendy quirinius 2013-03-01 09:11:58.703266 75 undecided +-3 280 65548 4294967350 52.3 33.06 true calvin white 2013-03-01 09:11:58.703295 30 quiet hour +-3 280 65597 4294967377 18.44 49.8 true alice falkner 2013-03-01 09:11:58.703304 74 zync studies +-3 280 65769 4294967324 28.78 35.05 true xavier ovid 2013-03-01 09:11:58.703148 43 kindergarten +-3 284 65566 4294967400 62.81 39.1 false jessica white 2013-03-01 
09:11:58.703199 70 opthamology +-3 286 65573 4294967493 18.27 23.71 false zach young 2013-03-01 09:11:58.703191 22 kindergarten +-3 289 65757 4294967528 56.2 44.24 true luke ichabod 2013-03-01 09:11:58.703294 7 yard duty +-3 298 65720 4294967305 34.6 39.7 false ethan steinbeck 2013-03-01 09:11:58.703079 35 kindergarten +-3 299 65763 4294967542 85.96 10.45 true jessica miller 2013-03-01 09:11:58.703245 26 mathematics +-3 303 65617 4294967473 10.26 1.41 false ulysses quirinius 2013-03-01 09:11:58.703189 84 chemistry +-3 307 65634 4294967546 90.3 28.44 false irene underhill 2013-03-01 09:11:58.703298 85 forestry +-3 311 65569 4294967460 3.82 35.45 false luke garcia 2013-03-01 09:11:58.703076 93 chemistry +-3 313 65540 4294967316 25.67 39.88 false ulysses robinson 2013-03-01 09:11:58.703227 61 religion +-3 314 65670 4294967330 13.67 34.86 false wendy xylophone 2013-03-01 09:11:58.703191 85 mathematics +-3 315 65671 4294967412 94.22 25.96 true oscar johnson 2013-03-01 09:11:58.703133 89 nap time +-3 316 65696 4294967445 22.0 43.41 false priscilla laertes 2013-03-01 09:11:58.70325 51 values clariffication +-3 318 65553 4294967452 9.86 32.77 false holly underhill 2013-03-01 09:11:58.703219 47 wind surfing +-3 320 65644 4294967434 84.39 48.0 false sarah robinson 2013-03-01 09:11:58.703288 72 wind surfing +-3 324 65773 4294967296 11.07 25.95 true oscar miller 2013-03-01 09:11:58.70332 57 opthamology +-3 333 65562 4294967359 22.34 35.58 false ulysses steinbeck 2013-03-01 09:11:58.703259 87 xylophone band +-3 335 65696 4294967333 72.26 9.66 true nick nixon 2013-03-01 09:11:58.703083 85 philosophy +-3 337 65629 4294967521 55.59 6.54 true luke king 2013-03-01 09:11:58.703207 59 industrial engineering +-3 337 65658 4294967361 43.4 12.05 false victor allen 2013-03-01 09:11:58.703155 45 topology +-3 339 65671 4294967311 8.37 15.98 true bob ellison 2013-03-01 09:11:58.703261 14 linguistics +-3 339 65737 4294967453 14.23 26.66 true ethan underhill 2013-03-01 09:11:58.703138 95 xylophone band +-3 343 65783 4294967378 7.1 18.16 true ulysses carson 2013-03-01 09:11:58.703253 97 mathematics +-3 344 65733 4294967363 0.56 11.96 true rachel thompson 2013-03-01 09:11:58.703276 88 wind surfing +-3 344 65756 4294967378 52.13 18.95 true victor thompson 2013-03-01 09:11:58.703299 81 topology +-3 346 65752 4294967298 56.05 34.03 false tom polk 2013-03-01 09:11:58.703217 49 zync studies +-3 350 65566 4294967434 23.22 6.68 true nick robinson 2013-03-01 09:11:58.703147 24 education +-3 362 65712 4294967325 43.73 48.74 false oscar garcia 2013-03-01 09:11:58.703282 30 chemistry +-3 374 65731 4294967388 22.35 22.71 true bob johnson 2013-03-01 09:11:58.703204 80 biology +-3 376 65548 4294967431 96.78 43.23 false fred ellison 2013-03-01 09:11:58.703233 75 education +-3 376 65766 4294967326 97.88 5.58 true sarah zipper 2013-03-01 09:11:58.703289 49 study skills +-3 381 65640 4294967379 59.34 7.97 false ulysses ellison 2013-03-01 09:11:58.703197 32 undecided +-3 384 65613 4294967470 63.49 45.85 false holly steinbeck 2013-03-01 09:11:58.703242 54 chemistry +-3 384 65676 4294967453 71.97 31.52 false alice davidson 2013-03-01 09:11:58.703226 14 xylophone band +-3 386 65611 4294967331 58.81 22.43 true sarah miller 2013-03-01 09:11:58.70316 75 mathematics +-3 386 65716 4294967496 12.12 2.37 false zach thompson 2013-03-01 09:11:58.703252 16 linguistics +-3 387 65550 4294967355 84.75 22.75 true holly thompson 2013-03-01 09:11:58.703073 52 biology +-3 400 65557 4294967503 76.31 29.44 false alice allen 2013-03-01 09:11:58.703323 19 debate 
+-3 408 65667 4294967509 81.68 45.9 true david hernandez 2013-03-01 09:11:58.703252 52 topology +-3 414 65608 4294967338 81.39 49.09 true tom steinbeck 2013-03-01 09:11:58.703251 11 xylophone band +-3 415 65571 4294967536 61.81 24.24 true victor robinson 2013-03-01 09:11:58.703305 23 american history +-3 423 65646 4294967378 63.19 34.04 false priscilla quirinius 2013-03-01 09:11:58.703228 35 xylophone band +-3 430 65667 4294967469 65.5 40.46 true yuri xylophone 2013-03-01 09:11:58.703258 31 american history +-3 431 65635 4294967500 29.06 0.34 false calvin ichabod 2013-03-01 09:11:58.703213 29 undecided +-3 432 65646 4294967492 0.83 27.18 true oscar davidson 2013-03-01 09:11:58.703071 56 linguistics +-3 433 65654 4294967455 6.83 5.33 false bob van buren 2013-03-01 09:11:58.703199 29 yard duty +-3 438 65618 4294967398 62.39 4.62 false victor xylophone 2013-03-01 09:11:58.703135 88 values clariffication +-3 447 65755 4294967320 43.69 20.03 false victor hernandez 2013-03-01 09:11:58.703176 14 forestry +-3 448 65610 4294967314 81.97 31.11 true mike xylophone 2013-03-01 09:11:58.703308 79 opthamology +-3 451 65696 4294967532 6.8 40.07 false luke young 2013-03-01 09:11:58.703182 27 biology +-3 454 65627 4294967481 17.6 35.72 false bob underhill 2013-03-01 09:11:58.703188 67 religion +-3 454 65705 4294967468 62.12 14.32 true mike white 2013-03-01 09:11:58.703087 40 joggying +-3 454 65733 4294967544 73.83 18.42 false bob ichabod 2013-03-01 09:11:58.70324 96 debate +-3 455 65570 4294967304 2.48 30.76 false alice king 2013-03-01 09:11:58.70314 42 forestry +-3 458 65563 4294967315 62.77 41.5 false alice king 2013-03-01 09:11:58.703247 3 mathematics +-3 458 65679 4294967331 64.29 43.8 true irene young 2013-03-01 09:11:58.703084 3 american history +-3 458 65696 4294967418 45.24 8.49 false irene ellison 2013-03-01 09:11:58.703092 54 american history +-3 459 65644 4294967456 92.71 0.08 false jessica king 2013-03-01 09:11:58.703279 53 joggying +-3 465 65551 4294967457 83.39 46.64 true mike allen 2013-03-01 09:11:58.703292 53 values clariffication +-3 465 65735 4294967298 72.3 22.58 false bob underhill 2013-03-01 09:11:58.703176 81 joggying +-3 467 65575 4294967437 81.64 23.53 true tom hernandez 2013-03-01 09:11:58.703188 33 study skills +-3 469 65577 4294967451 88.78 32.96 true katie ichabod 2013-03-01 09:11:58.703139 69 undecided +-3 469 65698 4294967357 47.51 49.22 true david falkner 2013-03-01 09:11:58.703305 78 joggying +-3 469 65752 4294967350 55.41 32.11 true oscar johnson 2013-03-01 09:11:58.70311 47 philosophy +-3 477 65785 4294967464 97.51 10.84 true tom hernandez 2013-03-01 09:11:58.703108 7 history +-3 485 65661 4294967441 26.21 16.19 false alice xylophone 2013-03-01 09:11:58.703129 97 topology +-3 485 65669 4294967428 21.34 13.07 false priscilla zipper 2013-03-01 09:11:58.703321 28 quiet hour +-3 485 65684 4294967483 11.83 8.04 false david garcia 2013-03-01 09:11:58.70319 63 wind surfing +-3 493 65662 4294967482 28.75 30.21 false xavier garcia 2013-03-01 09:11:58.703194 5 education +-3 494 65589 4294967369 48.09 14.4 false jessica johnson 2013-03-01 09:11:58.703319 79 nap time +-3 498 65751 4294967331 80.65 0.28 true gabriella brown 2013-03-01 09:11:58.703288 61 opthamology +-3 500 65704 4294967480 2.26 28.79 true mike polk 2013-03-01 09:11:58.70319 4 nap time +-3 505 65565 4294967407 68.73 4.65 true holly nixon 2013-03-01 09:11:58.703262 15 debate +-3 507 65671 4294967305 60.28 41.5 false quinn polk 2013-03-01 09:11:58.703244 77 industrial engineering +-3 507 65728 4294967525 81.95 47.14 true 
rachel davidson 2013-03-01 09:11:58.703316 31 study skills +PREHOOK: query: select t, f, s from text_llap2 order by t, s, f limit 100 +PREHOOK: type: QUERY +PREHOOK: Input: default@text_llap2 +#### A masked pattern was here #### +POSTHOOK: query: select t, f, s from text_llap2 order by t, s, f limit 100 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@text_llap2 +#### A masked pattern was here #### +-2 1.79 david falkner +-2 16.98 alice nixon +-2 19.69 bob xylophone +-2 20.94 bob van buren +-2 25.48 david laertes +-2 26.68 alice underhill +-2 48.3 alice xylophone +-2 6.49 alice underhill +-2 67.12 bob ovid +-2 76.54 calvin xylophone +-2 87.78 alice carson +-2 9.81 bob king +-2 94.57 david miller +-2 99.45 bob falkner +-3 0.56 rachel thompson +-3 0.83 oscar davidson +-3 10.26 ulysses quirinius +-3 11.07 oscar miller +-3 11.83 david garcia +-3 12.12 zach thompson +-3 13.67 wendy xylophone +-3 14.23 ethan underhill +-3 17.6 bob underhill +-3 18.27 zach young +-3 18.44 alice falkner +-3 19.94 quinn ellison +-3 2.26 mike polk +-3 2.48 alice king +-3 20.95 bob polk +-3 21.34 priscilla zipper +-3 22.0 priscilla laertes +-3 22.34 ulysses steinbeck +-3 22.35 bob johnson +-3 23.22 nick robinson +-3 25.5 wendy quirinius +-3 25.67 ulysses robinson +-3 26.21 alice xylophone +-3 28.75 xavier garcia +-3 28.78 xavier ovid +-3 29.06 calvin ichabod +-3 3.82 luke garcia +-3 34.6 ethan steinbeck +-3 38.05 david carson +-3 38.22 sarah xylophone +-3 43.4 victor allen +-3 43.69 victor hernandez +-3 43.73 oscar garcia +-3 45.24 irene ellison +-3 47.51 david falkner +-3 48.09 jessica johnson +-3 52.13 victor thompson +-3 52.3 calvin white +-3 55.41 oscar johnson +-3 55.59 luke king +-3 56.05 tom polk +-3 56.2 luke ichabod +-3 58.81 sarah miller +-3 59.07 bob falkner +-3 59.34 ulysses ellison +-3 6.8 luke young +-3 6.83 bob van buren +-3 60.28 quinn polk +-3 61.81 victor robinson +-3 62.12 mike white +-3 62.39 victor xylophone +-3 62.77 alice king +-3 62.81 jessica white +-3 63.19 priscilla quirinius +-3 63.49 holly steinbeck +-3 64.29 irene young +-3 65.5 yuri xylophone +-3 68.73 holly nixon +-3 7.1 ulysses carson +-3 71.78 wendy robinson +-3 71.97 alice davidson +-3 72.26 nick nixon +-3 72.3 bob underhill +-3 73.83 bob ichabod +-3 74.92 mike king +-3 76.31 alice allen +-3 8.37 bob ellison +-3 80.65 gabriella brown +-3 81.39 tom steinbeck +-3 81.64 tom hernandez +-3 81.68 david hernandez +-3 81.95 rachel davidson +-3 81.97 mike xylophone +-3 82.74 holly polk +-3 83.39 mike allen +-3 84.39 sarah robinson +-3 84.75 holly thompson +-3 85.96 jessica miller +-3 88.78 katie ichabod +-3 9.86 holly underhill +-3 90.3 irene underhill +-3 92.71 jessica king +-3 94.22 oscar johnson +-3 96.78 fred ellison +-3 97.51 tom hernandez +-3 97.88 sarah zipper +PREHOOK: query: select ctinyint, cstring1, cboolean2 from text_llap100 order by ctinyint, cstring1, cboolean2 +PREHOOK: type: QUERY +PREHOOK: Input: default@text_llap100 +#### A masked pattern was here #### +POSTHOOK: query: select ctinyint, cstring1, cboolean2 from text_llap100 order by ctinyint, cstring1, cboolean2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@text_llap100 +#### A masked pattern was here #### +-1 cvLH6Eat2yFsyy7p NULL +-11 cvLH6Eat2yFsyy7p NULL +-11 cvLH6Eat2yFsyy7p NULL +-11 cvLH6Eat2yFsyy7p NULL +-12 cvLH6Eat2yFsyy7p NULL +-13 cvLH6Eat2yFsyy7p NULL +-16 cvLH6Eat2yFsyy7p NULL +-16 cvLH6Eat2yFsyy7p NULL +-19 cvLH6Eat2yFsyy7p NULL +-21 cvLH6Eat2yFsyy7p NULL +-21 cvLH6Eat2yFsyy7p NULL +-22 cvLH6Eat2yFsyy7p NULL +-22 cvLH6Eat2yFsyy7p NULL +-22 
cvLH6Eat2yFsyy7p NULL +-23 cvLH6Eat2yFsyy7p NULL +-23 cvLH6Eat2yFsyy7p NULL +-23 cvLH6Eat2yFsyy7p NULL +-24 cvLH6Eat2yFsyy7p NULL +-28 cvLH6Eat2yFsyy7p NULL +-28 cvLH6Eat2yFsyy7p NULL +-30 cvLH6Eat2yFsyy7p NULL +-32 cvLH6Eat2yFsyy7p NULL +-33 cvLH6Eat2yFsyy7p NULL +-33 cvLH6Eat2yFsyy7p NULL +-34 cvLH6Eat2yFsyy7p NULL +-34 cvLH6Eat2yFsyy7p NULL +-36 cvLH6Eat2yFsyy7p NULL +-37 cvLH6Eat2yFsyy7p NULL +-4 cvLH6Eat2yFsyy7p NULL +-4 cvLH6Eat2yFsyy7p NULL +-40 cvLH6Eat2yFsyy7p NULL +-43 cvLH6Eat2yFsyy7p NULL +-44 cvLH6Eat2yFsyy7p NULL +-45 cvLH6Eat2yFsyy7p NULL +-45 cvLH6Eat2yFsyy7p NULL +-47 cvLH6Eat2yFsyy7p NULL +-48 cvLH6Eat2yFsyy7p NULL +-48 cvLH6Eat2yFsyy7p NULL +-5 cvLH6Eat2yFsyy7p NULL +-5 cvLH6Eat2yFsyy7p NULL +-5 cvLH6Eat2yFsyy7p NULL +-50 cvLH6Eat2yFsyy7p NULL +-51 cvLH6Eat2yFsyy7p NULL +-53 cvLH6Eat2yFsyy7p NULL +-54 cvLH6Eat2yFsyy7p NULL +-55 cvLH6Eat2yFsyy7p NULL +-55 cvLH6Eat2yFsyy7p NULL +-56 cvLH6Eat2yFsyy7p NULL +-56 cvLH6Eat2yFsyy7p NULL +-57 cvLH6Eat2yFsyy7p NULL +-59 cvLH6Eat2yFsyy7p NULL +-62 cvLH6Eat2yFsyy7p NULL +-7 cvLH6Eat2yFsyy7p NULL +0 cvLH6Eat2yFsyy7p NULL +0 cvLH6Eat2yFsyy7p NULL +10 cvLH6Eat2yFsyy7p NULL +13 cvLH6Eat2yFsyy7p NULL +16 cvLH6Eat2yFsyy7p NULL +18 cvLH6Eat2yFsyy7p NULL +19 cvLH6Eat2yFsyy7p NULL +2 cvLH6Eat2yFsyy7p NULL +21 cvLH6Eat2yFsyy7p NULL +24 cvLH6Eat2yFsyy7p NULL +24 cvLH6Eat2yFsyy7p NULL +26 cvLH6Eat2yFsyy7p NULL +27 cvLH6Eat2yFsyy7p NULL +27 cvLH6Eat2yFsyy7p NULL +28 cvLH6Eat2yFsyy7p NULL +29 cvLH6Eat2yFsyy7p NULL +29 cvLH6Eat2yFsyy7p NULL +30 cvLH6Eat2yFsyy7p NULL +31 cvLH6Eat2yFsyy7p NULL +31 cvLH6Eat2yFsyy7p NULL +34 cvLH6Eat2yFsyy7p NULL +34 cvLH6Eat2yFsyy7p NULL +36 cvLH6Eat2yFsyy7p NULL +36 cvLH6Eat2yFsyy7p NULL +38 cvLH6Eat2yFsyy7p NULL +38 cvLH6Eat2yFsyy7p NULL +38 cvLH6Eat2yFsyy7p NULL +39 cvLH6Eat2yFsyy7p NULL +4 cvLH6Eat2yFsyy7p NULL +40 cvLH6Eat2yFsyy7p NULL +40 cvLH6Eat2yFsyy7p NULL +41 cvLH6Eat2yFsyy7p NULL +43 cvLH6Eat2yFsyy7p NULL +46 cvLH6Eat2yFsyy7p NULL +5 cvLH6Eat2yFsyy7p NULL +51 cvLH6Eat2yFsyy7p NULL +51 cvLH6Eat2yFsyy7p NULL +53 cvLH6Eat2yFsyy7p NULL +53 cvLH6Eat2yFsyy7p NULL +61 cvLH6Eat2yFsyy7p NULL +61 cvLH6Eat2yFsyy7p NULL +61 cvLH6Eat2yFsyy7p NULL +62 cvLH6Eat2yFsyy7p NULL +8 cvLH6Eat2yFsyy7p NULL +9 cvLH6Eat2yFsyy7p NULL +NULL cvLH6Eat2yFsyy7p NULL +NULL cvLH6Eat2yFsyy7p NULL +PREHOOK: query: select * from text_llap100 order by cint, cstring1, cstring2 +PREHOOK: type: QUERY +PREHOOK: Input: default@text_llap100 +#### A masked pattern was here #### +POSTHOOK: query: select * from text_llap100 order by cint, cstring1, cstring2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@text_llap100 +#### A masked pattern was here #### +-1 -75 528534767 NULL -1.389 -863.257 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:49.331 1969-12-31 16:00:07.585 true NULL +-11 -15431 528534767 NULL -11.0 -15431.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.176 1969-12-31 16:00:07.787 true NULL +-11 7476 528534767 NULL -11.0 7476.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.551 1969-12-31 15:59:57.567 true NULL +-11 9472 528534767 NULL -11.0 9472.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:47.917 1969-12-31 16:00:03.716 true NULL +-12 -2013 528534767 NULL -12.0 -2013.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:14.907 1969-12-31 15:59:58.789 true NULL +-13 -13372 528534767 NULL -13.0 -13372.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:08.499 1969-12-31 15:59:48.221 true NULL +-16 -6922 528534767 NULL -16.0 -6922.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:08.402 1969-12-31 15:59:50.561 true NULL +-16 -7964 528534767 NULL -16.0 -7964.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 
16:00:08.035 1969-12-31 16:00:12.464 true NULL +-19 1206 528534767 NULL -19.0 1206.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:04.587 1969-12-31 16:00:08.381 true NULL +-21 -7183 528534767 NULL -21.0 -7183.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:45.035 1969-12-31 16:00:06.182 true NULL +-21 3168 528534767 NULL -21.0 3168.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:56.834 1969-12-31 16:00:13.331 true NULL +-22 3856 528534767 NULL -22.0 3856.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:48.508 1969-12-31 15:59:54.534 true NULL +-22 77 528534767 NULL -22.0 77.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:45.928 1969-12-31 15:59:43.621 true NULL +-22 8499 528534767 NULL -22.0 8499.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:15.626 1969-12-31 16:00:10.923 true NULL +-23 -10154 528534767 NULL -23.0 -10154.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:44.088 1969-12-31 15:59:56.086 true NULL +-23 13026 528534767 NULL -23.0 13026.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:14.625 1969-12-31 16:00:10.77 true NULL +-23 4587 528534767 NULL -23.0 4587.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:49.732 1969-12-31 15:59:48.52 true NULL +-24 163 528534767 NULL -24.0 163.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:55.51 1969-12-31 16:00:04.014 true NULL +-28 -15813 528534767 NULL -28.0 -15813.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:55.787 1969-12-31 16:00:01.546 true NULL +-28 6453 528534767 NULL -28.0 6453.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:01.475 1969-12-31 16:00:07.828 true NULL +-30 834 528534767 NULL -30.0 834.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:14.072 1969-12-31 16:00:03.004 true NULL +-32 11242 528534767 NULL -32.0 11242.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:49.091 1969-12-31 15:59:55.681 true NULL +-33 14072 528534767 NULL -33.0 14072.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:03.168 1969-12-31 15:59:55.836 true NULL +-33 7350 528534767 NULL -33.0 7350.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:07.952 1969-12-31 15:59:48.183 true NULL +-34 15007 528534767 NULL -34.0 15007.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:50.434 1969-12-31 16:00:13.352 true NULL +-34 4181 528534767 NULL -34.0 4181.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:06.557 1969-12-31 16:00:04.869 true NULL +-36 1639 528534767 NULL -36.0 1639.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:00.186 1969-12-31 16:00:13.098 true NULL +-37 -12472 528534767 NULL -37.0 -12472.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:13.3 1969-12-31 15:59:55.998 true NULL +-4 -1027 528534767 NULL -4.0 -1027.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:46.628 1969-12-31 16:00:11.413 true NULL +-4 2617 528534767 NULL -4.0 2617.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:44.21 1969-12-31 15:59:44.733 true NULL +-40 -4463 528534767 NULL -40.0 -4463.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.647 1969-12-31 15:59:46.254 true NULL +-43 486 528534767 NULL -43.0 486.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:01.345 1969-12-31 15:59:52.667 true NULL +-44 -1299 528534767 NULL -44.0 -1299.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:00.163 1969-12-31 15:59:47.687 true NULL +-45 -14072 528534767 NULL -45.0 -14072.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:45.621 1969-12-31 15:59:45.914 true NULL +-45 5521 528534767 NULL -45.0 5521.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:00.01 1969-12-31 15:59:48.553 true NULL +-47 -2468 528534767 NULL -47.0 -2468.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:48.68 1969-12-31 16:00:02.94 true NULL +-48 -7735 528534767 NULL -48.0 -7735.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:09.472 1969-12-31 16:00:00.8 true NULL +-48 13300 528534767 NULL -48.0 13300.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 
16:00:00.077 1969-12-31 15:59:45.827 true NULL +-5 -13229 528534767 NULL -5.0 -13229.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:00.834 1969-12-31 16:00:00.388 true NULL +-5 -14379 528534767 NULL -5.0 -14379.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:00.037 1969-12-31 15:59:49.141 true NULL +-5 12422 528534767 NULL -5.0 12422.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:55.745 1969-12-31 15:59:48.802 true NULL +-50 -13326 528534767 NULL -50.0 -13326.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:46.674 1969-12-31 16:00:08.875 true NULL +-51 -12083 528534767 NULL -51.0 -12083.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:13.026 1969-12-31 16:00:02.52 true NULL +-53 -3419 528534767 NULL -53.0 -3419.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:46.771 1969-12-31 15:59:53.744 true NULL +-54 -10268 528534767 NULL -54.0 -10268.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:53.417 1969-12-31 16:00:00.687 true NULL +-55 -7353 528534767 NULL -55.0 -7353.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:44.941 1969-12-31 15:59:54.268 true NULL +-55 -7449 528534767 NULL -55.0 -7449.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:49.846 1969-12-31 15:59:55.75 true NULL +-56 8353 528534767 NULL -56.0 8353.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:11.242 1969-12-31 15:59:46.526 true NULL +-56 8402 528534767 NULL -56.0 8402.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.01 1969-12-31 16:00:05.146 true NULL +-57 -11492 528534767 NULL -57.0 -11492.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:45.261 1969-12-31 16:00:05.306 true NULL +-59 10688 528534767 NULL -59.0 10688.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.746 1969-12-31 16:00:15.489 true NULL +-62 10 528534767 NULL -62.0 10.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.265 1969-12-31 15:59:56.584 true NULL +-7 2541 528534767 NULL -7.0 2541.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:08.353 1969-12-31 15:59:57.374 true NULL +0 -3166 528534767 NULL 0.0 -3166.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:10.688 1969-12-31 16:00:01.385 true NULL +0 15626 528534767 NULL 0.0 15626.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:09.566 1969-12-31 16:00:15.217 true NULL +10 9366 528534767 NULL 10.0 9366.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:01.358 1969-12-31 15:59:50.592 true NULL +13 1358 528534767 NULL 13.0 1358.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:06.453 1969-12-31 16:00:00.423 true NULL +16 5780 528534767 NULL 16.0 5780.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:44.451 1969-12-31 16:00:12.752 true NULL +18 -3045 528534767 NULL 18.0 -3045.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:47.829 1969-12-31 16:00:05.045 true NULL +19 7952 528534767 NULL 19.0 7952.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:12.161 1969-12-31 16:00:00.95 true NULL +2 1345 528534767 NULL 2.0 1345.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:55.333 1969-12-31 16:00:00.517 true NULL +21 11737 528534767 NULL 21.0 11737.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:55.537 1969-12-31 15:59:45.022 true NULL +24 -4812 528534767 NULL 24.0 -4812.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:07.86 1969-12-31 15:59:55 true NULL +24 4432 528534767 NULL 24.0 4432.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:02.541 1969-12-31 16:00:10.895 true NULL +26 3961 528534767 NULL 26.0 3961.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:57.987 1969-12-31 15:59:52.232 true NULL +27 -14965 528534767 NULL 27.0 -14965.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:12.422 1969-12-31 16:00:09.517 true NULL +27 -7824 528534767 NULL 27.0 -7824.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:04.963 1969-12-31 15:59:56.474 true NULL +28 8035 528534767 NULL 28.0 8035.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:03.856 
1969-12-31 15:59:55.95 true NULL +29 -1990 528534767 NULL 29.0 -1990.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:06.958 1969-12-31 15:59:52.902 true NULL +29 7021 528534767 NULL 29.0 7021.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:15.007 1969-12-31 16:00:15.148 true NULL +30 -814 528534767 NULL 30.0 -814.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:56.955 1969-12-31 16:00:11.799 true NULL +31 -9566 528534767 NULL 31.0 -9566.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:44.187 1969-12-31 16:00:06.961 true NULL +31 4963 528534767 NULL 31.0 4963.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:07.021 1969-12-31 16:00:02.997 true NULL +34 -15059 528534767 NULL 34.0 -15059.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:01.639 1969-12-31 16:00:13.206 true NULL +34 -4255 528534767 NULL 34.0 -4255.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:56.581 1969-12-31 15:59:57.88 true NULL +36 -15912 528534767 NULL 36.0 -15912.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:04.432 1969-12-31 16:00:04.376 true NULL +36 14907 528534767 NULL 36.0 14907.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:47.528 1969-12-31 15:59:47.206 true NULL +38 -11320 528534767 NULL 38.0 -11320.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:09.169 1969-12-31 16:00:03.822 true NULL +38 -4667 528534767 NULL 38.0 -4667.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:09.366 1969-12-31 15:59:52.334 true NULL +38 -6583 528534767 NULL 38.0 -6583.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:53.078 1969-12-31 16:00:06.722 true NULL +39 -10909 528534767 NULL 39.0 -10909.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.276 1969-12-31 16:00:12.738 true NULL +4 -14739 528534767 NULL 4.0 -14739.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:55.188 1969-12-31 16:00:15.26 true NULL +40 -1724 528534767 NULL 40.0 -1724.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:05.521 1969-12-31 15:59:57.835 true NULL +40 -7984 528534767 NULL 40.0 -7984.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:01.206 1969-12-31 16:00:02.59 true NULL +41 37 528534767 NULL 41.0 37.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.817 1969-12-31 15:59:53.672 true NULL +43 1475 528534767 NULL 43.0 1475.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:56.988 1969-12-31 16:00:03.442 true NULL +46 6958 528534767 NULL 46.0 6958.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.036 1969-12-31 16:00:10.191 true NULL +5 14625 528534767 NULL 5.0 14625.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:05.78 1969-12-31 16:00:15.34 true NULL +51 -15790 528534767 NULL 51.0 -15790.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:49.871 1969-12-31 15:59:57.821 true NULL +51 -4490 528534767 NULL 51.0 -4490.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:07.476 1969-12-31 15:59:49.318 true NULL +53 -10129 528534767 NULL 53.0 -10129.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:04.181 1969-12-31 16:00:08.061 true NULL +53 -12171 528534767 NULL 53.0 -12171.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:07.35 1969-12-31 15:59:57.549 true NULL +61 -1254 528534767 NULL 61.0 -1254.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:11.737 1969-12-31 16:00:12.004 true NULL +61 -15549 528534767 NULL 61.0 -15549.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:44.569 1969-12-31 15:59:51.665 true NULL +61 12161 528534767 NULL 61.0 12161.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:02.617 1969-12-31 16:00:10.536 true NULL +62 6557 528534767 NULL 62.0 6557.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.016 1969-12-31 16:00:00.367 true NULL +8 7860 528534767 NULL 8.0 7860.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.701 1969-12-31 16:00:01.97 true NULL +9 9169 528534767 NULL 9.0 9169.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:03.961 1969-12-31 16:00:14.126 true NULL 
+NULL -3012 528534767 NULL NULL -3012.0 cvLH6Eat2yFsyy7p NULL NULL 1969-12-31 16:00:03.756 true NULL +NULL -4213 528534767 NULL NULL -4213.0 cvLH6Eat2yFsyy7p NULL NULL 1969-12-31 16:00:13.589 true NULL +PREHOOK: query: select csmallint, cstring1, cboolean2 from text_llap100 order by csmallint, cstring1, cboolean2 +PREHOOK: type: QUERY +PREHOOK: Input: default@text_llap100 +#### A masked pattern was here #### +POSTHOOK: query: select csmallint, cstring1, cboolean2 from text_llap100 order by csmallint, cstring1, cboolean2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@text_llap100 +#### A masked pattern was here #### +-10129 cvLH6Eat2yFsyy7p NULL +-10154 cvLH6Eat2yFsyy7p NULL +-10268 cvLH6Eat2yFsyy7p NULL +-1027 cvLH6Eat2yFsyy7p NULL +-10909 cvLH6Eat2yFsyy7p NULL +-11320 cvLH6Eat2yFsyy7p NULL +-11492 cvLH6Eat2yFsyy7p NULL +-12083 cvLH6Eat2yFsyy7p NULL +-12171 cvLH6Eat2yFsyy7p NULL +-12472 cvLH6Eat2yFsyy7p NULL +-1254 cvLH6Eat2yFsyy7p NULL +-1299 cvLH6Eat2yFsyy7p NULL +-13229 cvLH6Eat2yFsyy7p NULL +-13326 cvLH6Eat2yFsyy7p NULL +-13372 cvLH6Eat2yFsyy7p NULL +-14072 cvLH6Eat2yFsyy7p NULL +-14379 cvLH6Eat2yFsyy7p NULL +-14739 cvLH6Eat2yFsyy7p NULL +-14965 cvLH6Eat2yFsyy7p NULL +-15059 cvLH6Eat2yFsyy7p NULL +-15431 cvLH6Eat2yFsyy7p NULL +-15549 cvLH6Eat2yFsyy7p NULL +-15790 cvLH6Eat2yFsyy7p NULL +-15813 cvLH6Eat2yFsyy7p NULL +-15912 cvLH6Eat2yFsyy7p NULL +-1724 cvLH6Eat2yFsyy7p NULL +-1990 cvLH6Eat2yFsyy7p NULL +-2013 cvLH6Eat2yFsyy7p NULL +-2468 cvLH6Eat2yFsyy7p NULL +-3012 cvLH6Eat2yFsyy7p NULL +-3045 cvLH6Eat2yFsyy7p NULL +-3166 cvLH6Eat2yFsyy7p NULL +-3419 cvLH6Eat2yFsyy7p NULL +-4213 cvLH6Eat2yFsyy7p NULL +-4255 cvLH6Eat2yFsyy7p NULL +-4463 cvLH6Eat2yFsyy7p NULL +-4490 cvLH6Eat2yFsyy7p NULL +-4667 cvLH6Eat2yFsyy7p NULL +-4812 cvLH6Eat2yFsyy7p NULL +-6583 cvLH6Eat2yFsyy7p NULL +-6922 cvLH6Eat2yFsyy7p NULL +-7183 cvLH6Eat2yFsyy7p NULL +-7353 cvLH6Eat2yFsyy7p NULL +-7449 cvLH6Eat2yFsyy7p NULL +-75 cvLH6Eat2yFsyy7p NULL +-7735 cvLH6Eat2yFsyy7p NULL +-7824 cvLH6Eat2yFsyy7p NULL +-7964 cvLH6Eat2yFsyy7p NULL +-7984 cvLH6Eat2yFsyy7p NULL +-814 cvLH6Eat2yFsyy7p NULL +-9566 cvLH6Eat2yFsyy7p NULL +10 cvLH6Eat2yFsyy7p NULL +10688 cvLH6Eat2yFsyy7p NULL +11242 cvLH6Eat2yFsyy7p NULL +11737 cvLH6Eat2yFsyy7p NULL +1206 cvLH6Eat2yFsyy7p NULL +12161 cvLH6Eat2yFsyy7p NULL +12422 cvLH6Eat2yFsyy7p NULL +13026 cvLH6Eat2yFsyy7p NULL +13300 cvLH6Eat2yFsyy7p NULL +1345 cvLH6Eat2yFsyy7p NULL +1358 cvLH6Eat2yFsyy7p NULL +14072 cvLH6Eat2yFsyy7p NULL +14625 cvLH6Eat2yFsyy7p NULL +1475 cvLH6Eat2yFsyy7p NULL +14907 cvLH6Eat2yFsyy7p NULL +15007 cvLH6Eat2yFsyy7p NULL +15626 cvLH6Eat2yFsyy7p NULL +163 cvLH6Eat2yFsyy7p NULL +1639 cvLH6Eat2yFsyy7p NULL +2541 cvLH6Eat2yFsyy7p NULL +2617 cvLH6Eat2yFsyy7p NULL +3168 cvLH6Eat2yFsyy7p NULL +37 cvLH6Eat2yFsyy7p NULL +3856 cvLH6Eat2yFsyy7p NULL +3961 cvLH6Eat2yFsyy7p NULL +4181 cvLH6Eat2yFsyy7p NULL +4432 cvLH6Eat2yFsyy7p NULL +4587 cvLH6Eat2yFsyy7p NULL +486 cvLH6Eat2yFsyy7p NULL +4963 cvLH6Eat2yFsyy7p NULL +5521 cvLH6Eat2yFsyy7p NULL +5780 cvLH6Eat2yFsyy7p NULL +6453 cvLH6Eat2yFsyy7p NULL +6557 cvLH6Eat2yFsyy7p NULL +6958 cvLH6Eat2yFsyy7p NULL +7021 cvLH6Eat2yFsyy7p NULL +7350 cvLH6Eat2yFsyy7p NULL +7476 cvLH6Eat2yFsyy7p NULL +77 cvLH6Eat2yFsyy7p NULL +7860 cvLH6Eat2yFsyy7p NULL +7952 cvLH6Eat2yFsyy7p NULL +8035 cvLH6Eat2yFsyy7p NULL +834 cvLH6Eat2yFsyy7p NULL +8353 cvLH6Eat2yFsyy7p NULL +8402 cvLH6Eat2yFsyy7p NULL +8499 cvLH6Eat2yFsyy7p NULL +9169 cvLH6Eat2yFsyy7p NULL +9366 cvLH6Eat2yFsyy7p NULL +9472 cvLH6Eat2yFsyy7p NULL +PREHOOK: query: select t, s, ts from text_llap2 order 
by t, s, ts limit 100 +PREHOOK: type: QUERY +PREHOOK: Input: default@text_llap2 +#### A masked pattern was here #### +POSTHOOK: query: select t, s, ts from text_llap2 order by t, s, ts limit 100 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@text_llap2 +#### A masked pattern was here #### +-2 alice carson 2013-03-01 09:11:58.703074 +-2 alice nixon 2013-03-01 09:11:58.703321 +-2 alice underhill 2013-03-01 09:11:58.703122 +-2 alice underhill 2013-03-01 09:11:58.703127 +-2 alice xylophone 2013-03-01 09:11:58.703105 +-2 bob falkner 2013-03-01 09:11:58.703071 +-2 bob king 2013-03-01 09:11:58.703236 +-2 bob ovid 2013-03-01 09:11:58.703285 +-2 bob van buren 2013-03-01 09:11:58.703218 +-2 bob xylophone 2013-03-01 09:11:58.703219 +-2 calvin xylophone 2013-03-01 09:11:58.703083 +-2 david falkner 2013-03-01 09:11:58.703254 +-2 david laertes 2013-03-01 09:11:58.703076 +-2 david miller 2013-03-01 09:11:58.703238 +-3 alice allen 2013-03-01 09:11:58.703323 +-3 alice davidson 2013-03-01 09:11:58.703226 +-3 alice falkner 2013-03-01 09:11:58.703304 +-3 alice king 2013-03-01 09:11:58.70314 +-3 alice king 2013-03-01 09:11:58.703247 +-3 alice xylophone 2013-03-01 09:11:58.703129 +-3 bob ellison 2013-03-01 09:11:58.703261 +-3 bob falkner 2013-03-01 09:11:58.70328 +-3 bob ichabod 2013-03-01 09:11:58.70324 +-3 bob johnson 2013-03-01 09:11:58.703204 +-3 bob polk 2013-03-01 09:11:58.703128 +-3 bob underhill 2013-03-01 09:11:58.703176 +-3 bob underhill 2013-03-01 09:11:58.703188 +-3 bob van buren 2013-03-01 09:11:58.703199 +-3 calvin ichabod 2013-03-01 09:11:58.703213 +-3 calvin white 2013-03-01 09:11:58.703295 +-3 david carson 2013-03-01 09:11:58.703136 +-3 david falkner 2013-03-01 09:11:58.703305 +-3 david garcia 2013-03-01 09:11:58.70319 +-3 david hernandez 2013-03-01 09:11:58.703252 +-3 ethan steinbeck 2013-03-01 09:11:58.703079 +-3 ethan underhill 2013-03-01 09:11:58.703138 +-3 fred ellison 2013-03-01 09:11:58.703233 +-3 gabriella brown 2013-03-01 09:11:58.703288 +-3 holly nixon 2013-03-01 09:11:58.703262 +-3 holly polk 2013-03-01 09:11:58.703273 +-3 holly steinbeck 2013-03-01 09:11:58.703242 +-3 holly thompson 2013-03-01 09:11:58.703073 +-3 holly underhill 2013-03-01 09:11:58.703219 +-3 irene ellison 2013-03-01 09:11:58.703092 +-3 irene underhill 2013-03-01 09:11:58.703298 +-3 irene young 2013-03-01 09:11:58.703084 +-3 jessica johnson 2013-03-01 09:11:58.703319 +-3 jessica king 2013-03-01 09:11:58.703279 +-3 jessica miller 2013-03-01 09:11:58.703245 +-3 jessica white 2013-03-01 09:11:58.703199 +-3 katie ichabod 2013-03-01 09:11:58.703139 +-3 luke garcia 2013-03-01 09:11:58.703076 +-3 luke ichabod 2013-03-01 09:11:58.703294 +-3 luke king 2013-03-01 09:11:58.703207 +-3 luke young 2013-03-01 09:11:58.703182 +-3 mike allen 2013-03-01 09:11:58.703292 +-3 mike king 2013-03-01 09:11:58.703214 +-3 mike polk 2013-03-01 09:11:58.70319 +-3 mike white 2013-03-01 09:11:58.703087 +-3 mike xylophone 2013-03-01 09:11:58.703308 +-3 nick nixon 2013-03-01 09:11:58.703083 +-3 nick robinson 2013-03-01 09:11:58.703147 +-3 oscar davidson 2013-03-01 09:11:58.703071 +-3 oscar garcia 2013-03-01 09:11:58.703282 +-3 oscar johnson 2013-03-01 09:11:58.70311 +-3 oscar johnson 2013-03-01 09:11:58.703133 +-3 oscar miller 2013-03-01 09:11:58.70332 +-3 priscilla laertes 2013-03-01 09:11:58.70325 +-3 priscilla quirinius 2013-03-01 09:11:58.703228 +-3 priscilla zipper 2013-03-01 09:11:58.703321 +-3 quinn ellison 2013-03-01 09:11:58.703232 +-3 quinn polk 2013-03-01 09:11:58.703244 +-3 rachel davidson 2013-03-01 09:11:58.703316 +-3 rachel 
thompson 2013-03-01 09:11:58.703276 +-3 sarah miller 2013-03-01 09:11:58.70316 +-3 sarah robinson 2013-03-01 09:11:58.703288 +-3 sarah xylophone 2013-03-01 09:11:58.703112 +-3 sarah zipper 2013-03-01 09:11:58.703289 +-3 tom hernandez 2013-03-01 09:11:58.703108 +-3 tom hernandez 2013-03-01 09:11:58.703188 +-3 tom polk 2013-03-01 09:11:58.703217 +-3 tom steinbeck 2013-03-01 09:11:58.703251 +-3 ulysses carson 2013-03-01 09:11:58.703253 +-3 ulysses ellison 2013-03-01 09:11:58.703197 +-3 ulysses quirinius 2013-03-01 09:11:58.703189 +-3 ulysses robinson 2013-03-01 09:11:58.703227 +-3 ulysses steinbeck 2013-03-01 09:11:58.703259 +-3 victor allen 2013-03-01 09:11:58.703155 +-3 victor hernandez 2013-03-01 09:11:58.703176 +-3 victor robinson 2013-03-01 09:11:58.703305 +-3 victor thompson 2013-03-01 09:11:58.703299 +-3 victor xylophone 2013-03-01 09:11:58.703135 +-3 wendy quirinius 2013-03-01 09:11:58.703266 +-3 wendy robinson 2013-03-01 09:11:58.703294 +-3 wendy xylophone 2013-03-01 09:11:58.703191 +-3 xavier garcia 2013-03-01 09:11:58.703194 +-3 xavier ovid 2013-03-01 09:11:58.703148 +-3 yuri xylophone 2013-03-01 09:11:58.703258 +-3 zach thompson 2013-03-01 09:11:58.703252 +-3 zach young 2013-03-01 09:11:58.703191 +PREHOOK: query: select csmallint, cstring1, cboolean2 from text_llap100 order by csmallint, cstring1, cboolean2 +PREHOOK: type: QUERY +PREHOOK: Input: default@text_llap100 +#### A masked pattern was here #### +POSTHOOK: query: select csmallint, cstring1, cboolean2 from text_llap100 order by csmallint, cstring1, cboolean2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@text_llap100 +#### A masked pattern was here #### +-10129 cvLH6Eat2yFsyy7p NULL +-10154 cvLH6Eat2yFsyy7p NULL +-10268 cvLH6Eat2yFsyy7p NULL +-1027 cvLH6Eat2yFsyy7p NULL +-10909 cvLH6Eat2yFsyy7p NULL +-11320 cvLH6Eat2yFsyy7p NULL +-11492 cvLH6Eat2yFsyy7p NULL +-12083 cvLH6Eat2yFsyy7p NULL +-12171 cvLH6Eat2yFsyy7p NULL +-12472 cvLH6Eat2yFsyy7p NULL +-1254 cvLH6Eat2yFsyy7p NULL +-1299 cvLH6Eat2yFsyy7p NULL +-13229 cvLH6Eat2yFsyy7p NULL +-13326 cvLH6Eat2yFsyy7p NULL +-13372 cvLH6Eat2yFsyy7p NULL +-14072 cvLH6Eat2yFsyy7p NULL +-14379 cvLH6Eat2yFsyy7p NULL +-14739 cvLH6Eat2yFsyy7p NULL +-14965 cvLH6Eat2yFsyy7p NULL +-15059 cvLH6Eat2yFsyy7p NULL +-15431 cvLH6Eat2yFsyy7p NULL +-15549 cvLH6Eat2yFsyy7p NULL +-15790 cvLH6Eat2yFsyy7p NULL +-15813 cvLH6Eat2yFsyy7p NULL +-15912 cvLH6Eat2yFsyy7p NULL +-1724 cvLH6Eat2yFsyy7p NULL +-1990 cvLH6Eat2yFsyy7p NULL +-2013 cvLH6Eat2yFsyy7p NULL +-2468 cvLH6Eat2yFsyy7p NULL +-3012 cvLH6Eat2yFsyy7p NULL +-3045 cvLH6Eat2yFsyy7p NULL +-3166 cvLH6Eat2yFsyy7p NULL +-3419 cvLH6Eat2yFsyy7p NULL +-4213 cvLH6Eat2yFsyy7p NULL +-4255 cvLH6Eat2yFsyy7p NULL +-4463 cvLH6Eat2yFsyy7p NULL +-4490 cvLH6Eat2yFsyy7p NULL +-4667 cvLH6Eat2yFsyy7p NULL +-4812 cvLH6Eat2yFsyy7p NULL +-6583 cvLH6Eat2yFsyy7p NULL +-6922 cvLH6Eat2yFsyy7p NULL +-7183 cvLH6Eat2yFsyy7p NULL +-7353 cvLH6Eat2yFsyy7p NULL +-7449 cvLH6Eat2yFsyy7p NULL +-75 cvLH6Eat2yFsyy7p NULL +-7735 cvLH6Eat2yFsyy7p NULL +-7824 cvLH6Eat2yFsyy7p NULL +-7964 cvLH6Eat2yFsyy7p NULL +-7984 cvLH6Eat2yFsyy7p NULL +-814 cvLH6Eat2yFsyy7p NULL +-9566 cvLH6Eat2yFsyy7p NULL +10 cvLH6Eat2yFsyy7p NULL +10688 cvLH6Eat2yFsyy7p NULL +11242 cvLH6Eat2yFsyy7p NULL +11737 cvLH6Eat2yFsyy7p NULL +1206 cvLH6Eat2yFsyy7p NULL +12161 cvLH6Eat2yFsyy7p NULL +12422 cvLH6Eat2yFsyy7p NULL +13026 cvLH6Eat2yFsyy7p NULL +13300 cvLH6Eat2yFsyy7p NULL +1345 cvLH6Eat2yFsyy7p NULL +1358 cvLH6Eat2yFsyy7p NULL +14072 cvLH6Eat2yFsyy7p NULL +14625 cvLH6Eat2yFsyy7p NULL +1475 cvLH6Eat2yFsyy7p NULL +14907 
cvLH6Eat2yFsyy7p NULL +15007 cvLH6Eat2yFsyy7p NULL +15626 cvLH6Eat2yFsyy7p NULL +163 cvLH6Eat2yFsyy7p NULL +1639 cvLH6Eat2yFsyy7p NULL +2541 cvLH6Eat2yFsyy7p NULL +2617 cvLH6Eat2yFsyy7p NULL +3168 cvLH6Eat2yFsyy7p NULL +37 cvLH6Eat2yFsyy7p NULL +3856 cvLH6Eat2yFsyy7p NULL +3961 cvLH6Eat2yFsyy7p NULL +4181 cvLH6Eat2yFsyy7p NULL +4432 cvLH6Eat2yFsyy7p NULL +4587 cvLH6Eat2yFsyy7p NULL +486 cvLH6Eat2yFsyy7p NULL +4963 cvLH6Eat2yFsyy7p NULL +5521 cvLH6Eat2yFsyy7p NULL +5780 cvLH6Eat2yFsyy7p NULL +6453 cvLH6Eat2yFsyy7p NULL +6557 cvLH6Eat2yFsyy7p NULL +6958 cvLH6Eat2yFsyy7p NULL +7021 cvLH6Eat2yFsyy7p NULL +7350 cvLH6Eat2yFsyy7p NULL +7476 cvLH6Eat2yFsyy7p NULL +77 cvLH6Eat2yFsyy7p NULL +7860 cvLH6Eat2yFsyy7p NULL +7952 cvLH6Eat2yFsyy7p NULL +8035 cvLH6Eat2yFsyy7p NULL +834 cvLH6Eat2yFsyy7p NULL +8353 cvLH6Eat2yFsyy7p NULL +8402 cvLH6Eat2yFsyy7p NULL +8499 cvLH6Eat2yFsyy7p NULL +9169 cvLH6Eat2yFsyy7p NULL +9366 cvLH6Eat2yFsyy7p NULL +9472 cvLH6Eat2yFsyy7p NULL +PREHOOK: query: DROP TABLE text_llap +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@text_llap +PREHOOK: Output: default@text_llap +POSTHOOK: query: DROP TABLE text_llap +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@text_llap +POSTHOOK: Output: default@text_llap diff --git a/ql/src/test/results/clientpositive/llap/llap_uncompressed.q.out b/ql/src/test/results/clientpositive/llap/llap_uncompressed.q.out new file mode 100644 index 0000000..6900cdb --- /dev/null +++ b/ql/src/test/results/clientpositive/llap/llap_uncompressed.q.out @@ -0,0 +1,283 @@ +PREHOOK: query: DROP TABLE orc_llap_n0 +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE orc_llap_n0 +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE orc_llap_n0( + ctinyint TINYINT, + csmallint SMALLINT, + cint INT, + cbigint BIGINT, + cfloat FLOAT, + cdouble DOUBLE, + cstring1 STRING, + cstring2 STRING, + ctimestamp1 TIMESTAMP, + ctimestamp2 TIMESTAMP, + cboolean1 BOOLEAN, + cboolean2 BOOLEAN, + cdecimal1 decimal(10,2), + cdecimal2 decimal(38,5)) + STORED AS ORC tblproperties ("orc.compress"="NONE") +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@orc_llap_n0 +POSTHOOK: query: CREATE TABLE orc_llap_n0( + ctinyint TINYINT, + csmallint SMALLINT, + cint INT, + cbigint BIGINT, + cfloat FLOAT, + cdouble DOUBLE, + cstring1 STRING, + cstring2 STRING, + ctimestamp1 TIMESTAMP, + ctimestamp2 TIMESTAMP, + cboolean1 BOOLEAN, + cboolean2 BOOLEAN, + cdecimal1 decimal(10,2), + cdecimal2 decimal(38,5)) + STORED AS ORC tblproperties ("orc.compress"="NONE") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@orc_llap_n0 +PREHOOK: query: insert into table orc_llap_n0 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2, + cast("3.345" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: default@orc_llap_n0 +POSTHOOK: query: insert into table orc_llap_n0 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2, + cast("3.345" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: default@orc_llap_n0 +POSTHOOK: Lineage: orc_llap_n0.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cboolean1 SIMPLE 
[(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cboolean2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cdecimal1 SIMPLE [] +POSTHOOK: Lineage: orc_llap_n0.cdecimal2 SIMPLE [] +POSTHOOK: Lineage: orc_llap_n0.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.csmallint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctimestamp1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctimestamp2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ] +PREHOOK: query: alter table orc_llap_n0 set tblproperties ("orc.compress"="NONE", 'orc.write.format'='UNSTABLE-PRE-2.0') +PREHOOK: type: ALTERTABLE_PROPERTIES +PREHOOK: Input: default@orc_llap_n0 +PREHOOK: Output: default@orc_llap_n0 +POSTHOOK: query: alter table orc_llap_n0 set tblproperties ("orc.compress"="NONE", 'orc.write.format'='UNSTABLE-PRE-2.0') +POSTHOOK: type: ALTERTABLE_PROPERTIES +POSTHOOK: Input: default@orc_llap_n0 +POSTHOOK: Output: default@orc_llap_n0 +PREHOOK: query: insert into table orc_llap_n0 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2, + cast("3.345" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: default@orc_llap_n0 +POSTHOOK: query: insert into table orc_llap_n0 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2, + cast("3.345" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: default@orc_llap_n0 +POSTHOOK: Lineage: orc_llap_n0.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cboolean1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cboolean2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cdecimal1 SIMPLE [] +POSTHOOK: Lineage: orc_llap_n0.cdecimal2 SIMPLE [] +POSTHOOK: Lineage: orc_llap_n0.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, 
type:int, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.csmallint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctimestamp1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctimestamp2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ] +PREHOOK: query: drop table llap_temp_table +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table llap_temp_table +POSTHOOK: type: DROPTABLE +PREHOOK: query: explain +select * from orc_llap_n0 where cint > 10 and cbigint is not null +PREHOOK: type: QUERY +POSTHOOK: query: explain +select * from orc_llap_n0 where cint > 10 and cbigint is not null +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + TableScan + alias: orc_llap_n0 + filterExpr: ((cint > 10) and cbigint is not null) (type: boolean) + Filter Operator + predicate: ((cint > 10) and cbigint is not null) (type: boolean) + Select Operator + expressions: ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cfloat (type: float), cdouble (type: double), cstring1 (type: string), cstring2 (type: string), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), cboolean1 (type: boolean), cboolean2 (type: boolean), cdecimal1 (type: decimal(10,2)), cdecimal2 (type: decimal(38,5)) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13 + ListSink + +PREHOOK: query: create table llap_temp_table as +select * from orc_llap_n0 where cint > 10 and cbigint is not null +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@orc_llap_n0 +PREHOOK: Output: database:default +PREHOOK: Output: default@llap_temp_table +POSTHOOK: query: create table llap_temp_table as +select * from orc_llap_n0 where cint > 10 and cbigint is not null +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@orc_llap_n0 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@llap_temp_table +POSTHOOK: Lineage: llap_temp_table.cbigint SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: llap_temp_table.cboolean1 SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: llap_temp_table.cboolean2 SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cboolean2, type:boolean, comment:null), ] +POSTHOOK: Lineage: llap_temp_table.cdecimal1 SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cdecimal1, type:decimal(10,2), comment:null), ] +POSTHOOK: Lineage: llap_temp_table.cdecimal2 SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cdecimal2, type:decimal(38,5), comment:null), ] +POSTHOOK: Lineage: llap_temp_table.cdouble SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: llap_temp_table.cfloat SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cfloat, type:float, 
comment:null), ] +POSTHOOK: Lineage: llap_temp_table.cint SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: llap_temp_table.csmallint SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: llap_temp_table.cstring1 SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: llap_temp_table.cstring2 SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cstring2, type:string, comment:null), ] +POSTHOOK: Lineage: llap_temp_table.ctimestamp1 SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: llap_temp_table.ctimestamp2 SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ] +POSTHOOK: Lineage: llap_temp_table.ctinyint SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:ctinyint, type:tinyint, comment:null), ] +PREHOOK: query: select sum(hash(*)) from llap_temp_table +PREHOOK: type: QUERY +PREHOOK: Input: default@llap_temp_table +#### A masked pattern was here #### +POSTHOOK: query: select sum(hash(*)) from llap_temp_table +POSTHOOK: type: QUERY +POSTHOOK: Input: default@llap_temp_table +#### A masked pattern was here #### +212787774304 +PREHOOK: query: explain +select * from orc_llap_n0 where cint > 10 and cint < 5000000 +PREHOOK: type: QUERY +POSTHOOK: query: explain +select * from orc_llap_n0 where cint > 10 and cint < 5000000 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + TableScan + alias: orc_llap_n0 + filterExpr: ((cint > 10) and (cint < 5000000)) (type: boolean) + Filter Operator + predicate: ((cint < 5000000) and (cint > 10)) (type: boolean) + Select Operator + expressions: ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cfloat (type: float), cdouble (type: double), cstring1 (type: string), cstring2 (type: string), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), cboolean1 (type: boolean), cboolean2 (type: boolean), cdecimal1 (type: decimal(10,2)), cdecimal2 (type: decimal(38,5)) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13 + ListSink + +PREHOOK: query: select * from orc_llap_n0 where cint > 10 and cint < 5000000 +PREHOOK: type: QUERY +PREHOOK: Input: default@orc_llap_n0 +#### A masked pattern was here #### +POSTHOOK: query: select * from orc_llap_n0 where cint > 10 and cint < 5000000 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@orc_llap_n0 +#### A masked pattern was here #### +-51 NULL 6981 707684071 -51.0 NULL YdG61y00526u5 G71l66F25 1969-12-31 16:00:08.451 NULL false true 3.35 5.56789 +-51 NULL 762 1587111633 -51.0 NULL q5y2Vy1 UbUx5 1969-12-31 16:00:08.451 NULL true false 3.35 5.56789 +-51 NULL 6981 -1569596201 -51.0 NULL o4lvY20511w0EOX3P3I82p63 J6YIW3yQlW3GydlRm 1969-12-31 16:00:08.451 NULL false true 3.35 5.56789 +-51 NULL 2949963 -1580871111 -51.0 NULL 0K68k3bdl7jO7 TPPAu 1969-12-31 16:00:08.451 NULL true false 3.35 5.56789 +-51 NULL 2089466 -240556350 -51.0 NULL cXX24dH7tblSj46j2g C31eea0wrHHqvj 1969-12-31 16:00:08.451 NULL true true 3.35 5.56789 +-51 NULL 6981 -471484665 -51.0 NULL 4KhrrQ0nJ7bMNTvhSCA R31tq72k1528DQ5C3Y4cNub 1969-12-31 16:00:08.451 NULL true false 3.35 5.56789 +-51 NULL 762 -755927849 -51.0 NULL a10E76jX35YwquKCTA s7473frMk58vm 1969-12-31 16:00:08.451 NULL true 
true 3.35 5.56789 +NULL 1016 3432650 1864027286 NULL 1016.0 0SPVSOVDI73t 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:12.364 false true 3.35 5.56789 +NULL 10144 4756105 1864027286 NULL 10144.0 bvoO6VwRmH6181mdOm87Do 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:12.134 true true 3.35 5.56789 +NULL 10653 3887593 1864027286 NULL 10653.0 2wak50xB5nHswbX 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:48.858 false true 3.35 5.56789 +NULL 10782 1286921 1864027286 NULL 10782.0 ODLrXI8882q8LS8 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:52.138 true true 3.35 5.56789 +NULL 197 762 1864027286 NULL 2563.58 3WsVeqb28VWEEOLI8ail 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:45.603 true true 3.35 5.56789 +NULL 1535 86028 1864027286 NULL 1535.0 T2o8XRFAL0HC4ikDQnfoCymw 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:54.662 true true 3.35 5.56789 +NULL 5064 504142 1864027286 NULL 5064.0 PlOxor04p5cvVl 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:09.828 true true 3.35 5.56789 +NULL -3799 1248059 1864027286 NULL -3799.0 Uhps6mMh3IfHB3j7yH62K 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:54.622 false true 3.35 5.56789 +NULL 10299 799471 1864027286 NULL 10299.0 2fu24 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:52.516 false true 3.35 5.56789 +NULL -8915 2101183 1864027286 NULL -8915.0 x7By66525 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:05.831 false true 3.35 5.56789 +8 NULL 2433892 -1611863517 8.0 NULL 674ILv3V2TxFqXP6wSbL VLprkK2XfX 1969-12-31 16:00:15.892 NULL false true 3.35 5.56789 +8 NULL 3073556 332961835 8.0 NULL rR855m18hps5nkaFqE43W pH15gLf8B4yNFDWFH74 1969-12-31 16:00:15.892 NULL true true 3.35 5.56789 +8 NULL 6981 627355276 8.0 NULL K630vaVf 7gDn3I45FGIX0J6JH74PCEN 1969-12-31 16:00:15.892 NULL false true 3.35 5.56789 +8 NULL 2229621 -381406148 8.0 NULL q7onkS7QRPh5ghOK oKb0bi 1969-12-31 16:00:15.892 NULL true false 3.35 5.56789 +NULL 359 6981 -1887561756 NULL 9763215.5639 sF2CRfgt2K 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:12.489 false false 3.35 5.56789 +NULL -12328 3253295 -1887561756 NULL -12328.0 Ut5NYg5XWb 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:57.985 true false 3.35 5.56789 +11 NULL 1000828 1531084669 11.0 NULL wM316f6NqGIkoP388j3F6 poWQQo3Upvt3Wh 1969-12-31 16:00:02.351 NULL false true 3.35 5.56789 +11 NULL 6981 -1908387379 11.0 NULL a3EhVU6Wuy7ycJ7wY7h2gv 0542kSCNs54o7tD6e2YuI3 1969-12-31 16:00:02.351 NULL true false 3.35 5.56789 +11 NULL 1310786 -413875656 11.0 NULL W0rvA4H1xn0xMG4uk0 8yVVjG 1969-12-31 16:00:02.351 NULL false true 3.35 5.56789 +11 NULL 6981 -667592125 11.0 NULL NULL xIVF2uu7 1969-12-31 16:00:02.351 NULL NULL true 3.35 5.56789 +11 NULL 3583612 -1172590956 11.0 NULL hrSdTD2Q05 mJ5nwN6o4s8Hi4 1969-12-31 16:00:02.351 NULL true true 3.35 5.56789 +11 NULL 6981 1532810435 11.0 NULL Y5x3JuI3M8jngv5N L760FuvYP 1969-12-31 16:00:02.351 NULL true true 3.35 5.56789 +11 NULL 4972984 -483828108 11.0 NULL Sf45K8ueb68jp6s8 jPWX6Wr4fmTBSc5HSlX1r 1969-12-31 16:00:02.351 NULL true false 3.35 5.56789 +11 NULL 762 -1005594359 11.0 NULL BLoMwUJ51ns6pd FtT7S 1969-12-31 16:00:02.351 NULL false false 3.35 5.56789 +NULL 359 762 -1645852809 NULL 9763215.5639 40ks5556SV xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:55.352 false false 3.35 5.56789 +NULL -75 6981 -1645852809 NULL -863.257 o5mb0QP5Y48Qd4vdB0 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:44.062 true false 3.35 5.56789 +NULL -75 6981 -1645852809 NULL -863.257 1FNNhmiFLGw425NA13g xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.463 false false 3.35 5.56789 +NULL -13036 1288927 -1645852809 NULL -13036.0 yinBY725P7V2 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:00.763 true false 3.35 5.56789 
+-51 NULL 6981 707684071 -51.0 NULL YdG61y00526u5 G71l66F25 1969-12-31 16:00:08.451 NULL false true 3.35 5.56789 +-51 NULL 762 1587111633 -51.0 NULL q5y2Vy1 UbUx5 1969-12-31 16:00:08.451 NULL true false 3.35 5.56789 +-51 NULL 6981 -1569596201 -51.0 NULL o4lvY20511w0EOX3P3I82p63 J6YIW3yQlW3GydlRm 1969-12-31 16:00:08.451 NULL false true 3.35 5.56789 +-51 NULL 2949963 -1580871111 -51.0 NULL 0K68k3bdl7jO7 TPPAu 1969-12-31 16:00:08.451 NULL true false 3.35 5.56789 +-51 NULL 2089466 -240556350 -51.0 NULL cXX24dH7tblSj46j2g C31eea0wrHHqvj 1969-12-31 16:00:08.451 NULL true true 3.35 5.56789 +-51 NULL 6981 -471484665 -51.0 NULL 4KhrrQ0nJ7bMNTvhSCA R31tq72k1528DQ5C3Y4cNub 1969-12-31 16:00:08.451 NULL true false 3.35 5.56789 +-51 NULL 762 -755927849 -51.0 NULL a10E76jX35YwquKCTA s7473frMk58vm 1969-12-31 16:00:08.451 NULL true true 3.35 5.56789 +NULL 1016 3432650 1864027286 NULL 1016.0 0SPVSOVDI73t 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:12.364 false true 3.35 5.56789 +NULL 10144 4756105 1864027286 NULL 10144.0 bvoO6VwRmH6181mdOm87Do 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:12.134 true true 3.35 5.56789 +NULL 10653 3887593 1864027286 NULL 10653.0 2wak50xB5nHswbX 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:48.858 false true 3.35 5.56789 +NULL 10782 1286921 1864027286 NULL 10782.0 ODLrXI8882q8LS8 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:52.138 true true 3.35 5.56789 +NULL 197 762 1864027286 NULL 2563.58 3WsVeqb28VWEEOLI8ail 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:45.603 true true 3.35 5.56789 +NULL 1535 86028 1864027286 NULL 1535.0 T2o8XRFAL0HC4ikDQnfoCymw 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:54.662 true true 3.35 5.56789 +NULL 5064 504142 1864027286 NULL 5064.0 PlOxor04p5cvVl 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:09.828 true true 3.35 5.56789 +NULL -3799 1248059 1864027286 NULL -3799.0 Uhps6mMh3IfHB3j7yH62K 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:54.622 false true 3.35 5.56789 +NULL 10299 799471 1864027286 NULL 10299.0 2fu24 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:52.516 false true 3.35 5.56789 +NULL -8915 2101183 1864027286 NULL -8915.0 x7By66525 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:05.831 false true 3.35 5.56789 +8 NULL 2433892 -1611863517 8.0 NULL 674ILv3V2TxFqXP6wSbL VLprkK2XfX 1969-12-31 16:00:15.892 NULL false true 3.35 5.56789 +8 NULL 3073556 332961835 8.0 NULL rR855m18hps5nkaFqE43W pH15gLf8B4yNFDWFH74 1969-12-31 16:00:15.892 NULL true true 3.35 5.56789 +8 NULL 6981 627355276 8.0 NULL K630vaVf 7gDn3I45FGIX0J6JH74PCEN 1969-12-31 16:00:15.892 NULL false true 3.35 5.56789 +8 NULL 2229621 -381406148 8.0 NULL q7onkS7QRPh5ghOK oKb0bi 1969-12-31 16:00:15.892 NULL true false 3.35 5.56789 +NULL 359 6981 -1887561756 NULL 9763215.5639 sF2CRfgt2K 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:12.489 false false 3.35 5.56789 +NULL -12328 3253295 -1887561756 NULL -12328.0 Ut5NYg5XWb 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:57.985 true false 3.35 5.56789 +11 NULL 1000828 1531084669 11.0 NULL wM316f6NqGIkoP388j3F6 poWQQo3Upvt3Wh 1969-12-31 16:00:02.351 NULL false true 3.35 5.56789 +11 NULL 6981 -1908387379 11.0 NULL a3EhVU6Wuy7ycJ7wY7h2gv 0542kSCNs54o7tD6e2YuI3 1969-12-31 16:00:02.351 NULL true false 3.35 5.56789 +11 NULL 1310786 -413875656 11.0 NULL W0rvA4H1xn0xMG4uk0 8yVVjG 1969-12-31 16:00:02.351 NULL false true 3.35 5.56789 +11 NULL 6981 -667592125 11.0 NULL NULL xIVF2uu7 1969-12-31 16:00:02.351 NULL NULL true 3.35 5.56789 +11 NULL 3583612 -1172590956 11.0 NULL hrSdTD2Q05 mJ5nwN6o4s8Hi4 1969-12-31 16:00:02.351 NULL true true 3.35 5.56789 +11 NULL 6981 1532810435 11.0 NULL Y5x3JuI3M8jngv5N L760FuvYP 
1969-12-31 16:00:02.351 NULL true true 3.35 5.56789 +11 NULL 4972984 -483828108 11.0 NULL Sf45K8ueb68jp6s8 jPWX6Wr4fmTBSc5HSlX1r 1969-12-31 16:00:02.351 NULL true false 3.35 5.56789 +11 NULL 762 -1005594359 11.0 NULL BLoMwUJ51ns6pd FtT7S 1969-12-31 16:00:02.351 NULL false false 3.35 5.56789 +NULL 359 762 -1645852809 NULL 9763215.5639 40ks5556SV xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:55.352 false false 3.35 5.56789 +NULL -75 6981 -1645852809 NULL -863.257 o5mb0QP5Y48Qd4vdB0 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:44.062 true false 3.35 5.56789 +NULL -75 6981 -1645852809 NULL -863.257 1FNNhmiFLGw425NA13g xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.463 false false 3.35 5.56789 +NULL -13036 1288927 -1645852809 NULL -13036.0 yinBY725P7V2 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:00.763 true false 3.35 5.56789 +PREHOOK: query: DROP TABLE orc_llap_n0 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@orc_llap_n0 +PREHOOK: Output: default@orc_llap_n0 +POSTHOOK: query: DROP TABLE orc_llap_n0 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@orc_llap_n0 +POSTHOOK: Output: default@orc_llap_n0 +PREHOOK: query: drop table llap_temp_table +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@llap_temp_table +PREHOOK: Output: default@llap_temp_table +POSTHOOK: query: drop table llap_temp_table +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@llap_temp_table +POSTHOOK: Output: default@llap_temp_table diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_create.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_create.q.out index bcd4144..69751a9 100644 --- a/ql/src/test/results/clientpositive/llap/materialized_view_create.q.out +++ b/ql/src/test/results/clientpositive/llap/materialized_view_create.q.out @@ -50,7 +50,7 @@ Table Parameters: numFiles 1 numRows 5 rawDataSize 1025 - totalSize 497 + totalSize 503 #### A masked pattern was here #### # Storage Information @@ -111,7 +111,7 @@ Table Parameters: numFiles 1 numRows 5 rawDataSize 580 - totalSize 346 + totalSize 348 #### A masked pattern was here #### # Storage Information @@ -247,7 +247,7 @@ key value numFiles 1 numRows 5 rawDataSize 1605 -totalSize 701 +totalSize 702 #### A masked pattern was here #### PREHOOK: query: drop materialized view cmv_mat_view_n4 PREHOOK: type: DROP_MATERIALIZED_VIEW diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite.q.out index 60e7f32..70649bd 100644 --- a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite.q.out +++ b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite.q.out @@ -69,7 +69,7 @@ bucketing_version 2 numFiles 1 numRows 2 rawDataSize 408 -totalSize 453 +totalSize 457 #### A masked pattern was here #### PREHOOK: query: create materialized view if not exists cmv_mat_view2_n4 enable rewrite as select a, c from cmv_basetable_n10 where a = 3 @@ -102,7 +102,7 @@ bucketing_version 2 numFiles 1 numRows 2 rawDataSize 232 -totalSize 322 +totalSize 326 #### A masked pattern was here #### PREHOOK: query: explain select a, c from cmv_basetable_n10 where a = 3 diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_3.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_3.q.out index cca7d9f..0d5da8e 100644 --- a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_3.q.out +++ 
b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_3.q.out @@ -749,7 +749,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: cmv_basetable_2 - Statistics: Num rows: 42 Data size: 4872 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 43 Data size: 4988 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: ((c > 10.1) and a is not null) (type: boolean) Statistics: Num rows: 14 Data size: 1624 Basic stats: COMPLETE Column stats: COMPLETE @@ -1027,7 +1027,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: cmv_basetable_2 - Statistics: Num rows: 42 Data size: 4872 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 43 Data size: 4988 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: ((c > 10.1) and a is not null) (type: boolean) Statistics: Num rows: 14 Data size: 1624 Basic stats: COMPLETE Column stats: COMPLETE diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_4.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_4.q.out index 7aef9be..f01591e 100644 --- a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_4.q.out +++ b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_4.q.out @@ -268,7 +268,7 @@ Table Parameters: numFiles 2 numRows 2 rawDataSize 248 - totalSize 706 + totalSize 736 transactional true transactional_properties default #### A masked pattern was here #### @@ -495,7 +495,7 @@ Table Parameters: numFiles 2 numRows 2 rawDataSize 248 - totalSize 706 + totalSize 736 transactional true transactional_properties default #### A masked pattern was here #### @@ -937,7 +937,7 @@ Table Type: MATERIALIZED_VIEW Table Parameters: bucketing_version 2 numFiles 3 - totalSize 1451 + totalSize 1512 transactional true transactional_properties default #### A masked pattern was here #### @@ -1064,7 +1064,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: cmv_basetable_2_n2 - Statistics: Num rows: 60 Data size: 7200 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 61 Data size: 7320 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: ((c > 10) and a is not null) (type: boolean) Statistics: Num rows: 20 Data size: 2400 Basic stats: COMPLETE Column stats: COMPLETE @@ -1283,19 +1283,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: cmv_basetable_2_n2 - Statistics: Num rows: 70 Data size: 8400 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 72 Data size: 8640 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: ((c > 10) and a is not null) (type: boolean) - Statistics: Num rows: 23 Data size: 2760 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 24 Data size: 2880 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: a (type: int), c (type: decimal(10,2)), d (type: int) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 23 Data size: 2760 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 24 Data size: 2880 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 23 Data size: 2760 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 24 Data size: 2880 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(10,2)), _col2 (type: int) Execution mode: llap LLAP IO: may 
be used (ACID table) @@ -1309,7 +1309,7 @@ STAGE PLANS: 0 _col0 (type: int) 1 _col0 (type: int) outputColumnNames: _col0, _col2, _col3 - Statistics: Num rows: 38 Data size: 4560 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 40 Data size: 4800 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: sum(_col3) keys: _col0 (type: int), _col2 (type: decimal(10,2)) @@ -1532,7 +1532,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: cmv_basetable_2_n2 - Statistics: Num rows: 85 Data size: 10200 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 88 Data size: 10560 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: ((ROW__ID.writeid > 4) and (c > 10) and a is not null) (type: boolean) Statistics: Num rows: 9 Data size: 1080 Basic stats: COMPLETE Column stats: COMPLETE diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_5.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_5.q.out index 2f0bf3d..26791e4 100644 --- a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_5.q.out +++ b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_5.q.out @@ -400,7 +400,7 @@ Table Type: MATERIALIZED_VIEW Table Parameters: bucketing_version 2 numFiles 2 - totalSize 1053 + totalSize 1078 transactional true transactional_properties default #### A masked pattern was here #### @@ -523,7 +523,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: cmv_basetable_2_n3 - Statistics: Num rows: 60 Data size: 6960 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 61 Data size: 7076 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: ((c > 10) and a is not null) (type: boolean) Statistics: Num rows: 20 Data size: 2320 Basic stats: COMPLETE Column stats: COMPLETE @@ -731,19 +731,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: cmv_basetable_2_n3 - Statistics: Num rows: 70 Data size: 8120 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 72 Data size: 8352 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: ((c > 10) and a is not null) (type: boolean) - Statistics: Num rows: 23 Data size: 2668 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 24 Data size: 2784 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: a (type: int), c (type: decimal(10,2)) outputColumnNames: _col0, _col1 - Statistics: Num rows: 23 Data size: 2668 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 24 Data size: 2784 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 23 Data size: 2668 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 24 Data size: 2784 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(10,2)) Execution mode: llap LLAP IO: may be used (ACID table) @@ -757,14 +757,14 @@ STAGE PLANS: 0 _col0 (type: int) 1 _col0 (type: int) outputColumnNames: _col0, _col2 - Statistics: Num rows: 38 Data size: 4408 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 40 Data size: 4640 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: _col0 (type: int), _col2 (type: decimal(10,2)) outputColumnNames: _col0, _col1 - Statistics: Num rows: 38 Data size: 4408 Basic stats: COMPLETE Column stats: COMPLETE + 
Statistics: Num rows: 40 Data size: 4640 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 38 Data size: 4408 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 40 Data size: 4640 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat @@ -774,7 +774,7 @@ STAGE PLANS: Select Operator expressions: _col0 (type: int), _col1 (type: decimal(10,2)) outputColumnNames: a, c - Statistics: Num rows: 38 Data size: 4408 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 40 Data size: 4640 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: compute_stats(a, 'hll'), compute_stats(c, 'hll') mode: hash @@ -943,7 +943,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: cmv_basetable_2_n3 - Statistics: Num rows: 85 Data size: 9860 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 88 Data size: 10208 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: ((ROW__ID.writeid > 4) and (c > 10) and a is not null) (type: boolean) Statistics: Num rows: 9 Data size: 1044 Basic stats: COMPLETE Column stats: COMPLETE diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_dummy.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_dummy.q.out index 9116089..e570efe 100644 --- a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_dummy.q.out +++ b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_dummy.q.out @@ -69,7 +69,7 @@ bucketing_version 2 numFiles 1 numRows 2 rawDataSize 408 -totalSize 453 +totalSize 457 #### A masked pattern was here #### PREHOOK: query: create materialized view if not exists cmv_mat_view2 enable rewrite as select a, c from cmv_basetable_n0 where a = 3 @@ -102,7 +102,7 @@ bucketing_version 2 numFiles 1 numRows 2 rawDataSize 232 -totalSize 322 +totalSize 326 #### A masked pattern was here #### PREHOOK: query: explain select a, c from cmv_basetable_n0 where a = 3 diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_multi_db.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_multi_db.q.out index 4cf7bce..284d0a9 100644 --- a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_multi_db.q.out +++ b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_multi_db.q.out @@ -93,7 +93,7 @@ bucketing_version 2 numFiles 1 numRows 2 rawDataSize 408 -totalSize 453 +totalSize 457 #### A masked pattern was here #### PREHOOK: query: create materialized view if not exists cmv_mat_view2_n2 enable rewrite as select a, c from db1.cmv_basetable_n7 where a = 3 @@ -126,7 +126,7 @@ bucketing_version 2 numFiles 1 numRows 2 rawDataSize 232 -totalSize 322 +totalSize 326 #### A masked pattern was here #### PREHOOK: query: create database db3 PREHOOK: type: CREATEDATABASE diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_rebuild_dummy.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_rebuild_dummy.q.out index 338a848..0a310a3 100644 --- a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_rebuild_dummy.q.out +++ b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_rebuild_dummy.q.out @@ -749,7 +749,7 @@ STAGE PLANS: Map Operator Tree: 
TableScan alias: cmv_basetable_2_n0 - Statistics: Num rows: 42 Data size: 4872 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 43 Data size: 4988 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: ((c > 10.1) and a is not null) (type: boolean) Statistics: Num rows: 14 Data size: 1624 Basic stats: COMPLETE Column stats: COMPLETE @@ -1027,7 +1027,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: cmv_basetable_2_n0 - Statistics: Num rows: 42 Data size: 4872 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 43 Data size: 4988 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: ((c > 10.1) and a is not null) (type: boolean) Statistics: Num rows: 14 Data size: 1624 Basic stats: COMPLETE Column stats: COMPLETE diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_time_window.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_time_window.q.out index 164568c..f82aa10 100644 --- a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_time_window.q.out +++ b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_time_window.q.out @@ -255,7 +255,7 @@ Table Parameters: numRows 2 rawDataSize 232 rewriting.time.window 300s - totalSize 586 + totalSize 608 #### A masked pattern was here #### # Storage Information @@ -477,7 +477,7 @@ Table Parameters: numRows 2 rawDataSize 232 rewriting.time.window 300s - totalSize 586 + totalSize 608 #### A masked pattern was here #### # Storage Information @@ -780,7 +780,7 @@ Table Parameters: numRows 3 rawDataSize 348 rewriting.time.window 300s - totalSize 616 + totalSize 628 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_describe.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_describe.q.out index 272cbec..70addc4 100644 --- a/ql/src/test/results/clientpositive/llap/materialized_view_describe.q.out +++ b/ql/src/test/results/clientpositive/llap/materialized_view_describe.q.out @@ -73,7 +73,7 @@ Table Parameters: numFiles 1 numRows 5 rawDataSize 580 - totalSize 346 + totalSize 348 #### A masked pattern was here #### # Storage Information @@ -100,7 +100,7 @@ key foo numFiles 1 numRows 5 rawDataSize 580 -totalSize 346 +totalSize 348 #### A masked pattern was here #### PREHOOK: query: select a, c from cmv_mat_view_n8 PREHOOK: type: QUERY @@ -242,7 +242,7 @@ Table Parameters: numFiles 1 numRows 5 rawDataSize 1025 - totalSize 497 + totalSize 503 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_drop.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_drop.q.out index ceb34f7..441f105 100644 --- a/ql/src/test/results/clientpositive/llap/materialized_view_drop.q.out +++ b/ql/src/test/results/clientpositive/llap/materialized_view_drop.q.out @@ -20,9 +20,9 @@ columns:struct columns { i32 cint, string cstring1} partitioned:false partitionColumns: totalNumberFiles:1 -totalFileSize:47120 -maxFileSize:47120 -minFileSize:47120 +totalFileSize:47137 +maxFileSize:47137 +minFileSize:47137 #### A masked pattern was here #### PREHOOK: query: drop materialized view dmv_mat_view diff --git a/ql/src/test/results/clientpositive/llap/mergejoin.q.out b/ql/src/test/results/clientpositive/llap/mergejoin.q.out index 832ed48..b240b11 100644 --- a/ql/src/test/results/clientpositive/llap/mergejoin.q.out +++ 
b/ql/src/test/results/clientpositive/llap/mergejoin.q.out @@ -64,8 +64,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -148,8 +147,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -445,8 +443,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -498,8 +496,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1620,8 +1618,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1665,8 +1663,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1816,8 +1814,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1861,8 +1859,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2012,8 +2010,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat 
allNative: true usesVectorUDFAdaptor: false @@ -2057,8 +2055,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2220,8 +2218,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2304,8 +2302,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2356,8 +2353,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2618,8 +2615,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2671,8 +2668,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2834,8 +2831,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2887,8 +2884,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2940,8 +2937,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true 
usesVectorUDFAdaptor: false @@ -2993,8 +2990,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3151,8 +3148,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3204,8 +3201,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3365,8 +3362,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3449,8 +3446,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3501,8 +3497,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3771,8 +3767,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3824,8 +3820,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3877,8 +3873,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ 
-3930,8 +3926,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -4101,8 +4097,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -4153,8 +4149,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/orc_analyze.q.out b/ql/src/test/results/clientpositive/llap/orc_analyze.q.out index abbbbf9..14bf186 100644 --- a/ql/src/test/results/clientpositive/llap/orc_analyze.q.out +++ b/ql/src/test/results/clientpositive/llap/orc_analyze.q.out @@ -102,7 +102,7 @@ Table Parameters: numFiles 1 numRows 100 rawDataSize 52600 - totalSize 3200 + totalSize 3236 #### A masked pattern was here #### # Storage Information @@ -150,7 +150,7 @@ Table Parameters: numFiles 1 numRows 100 rawDataSize 52600 - totalSize 3200 + totalSize 3236 #### A masked pattern was here #### # Storage Information @@ -237,7 +237,7 @@ Table Parameters: numFiles 1 numRows 100 rawDataSize 52600 - totalSize 3200 + totalSize 3236 #### A masked pattern was here #### # Storage Information @@ -345,7 +345,7 @@ Partition Parameters: numFiles 1 numRows 50 rawDataSize 21950 - totalSize 2099 + totalSize 2134 #### A masked pattern was here #### # Storage Information @@ -386,7 +386,7 @@ Partition Parameters: numFiles 1 numRows 50 rawDataSize 22050 - totalSize 2114 + totalSize 2147 #### A masked pattern was here #### # Storage Information @@ -439,7 +439,7 @@ Partition Parameters: numFiles 1 numRows 50 rawDataSize 21950 - totalSize 2099 + totalSize 2134 #### A masked pattern was here #### # Storage Information @@ -480,7 +480,7 @@ Partition Parameters: numFiles 1 numRows 50 rawDataSize 22050 - totalSize 2114 + totalSize 2147 #### A masked pattern was here #### # Storage Information @@ -576,7 +576,7 @@ Partition Parameters: numFiles 1 numRows 50 rawDataSize 21950 - totalSize 2099 + totalSize 2134 #### A masked pattern was here #### # Storage Information @@ -617,7 +617,7 @@ Partition Parameters: numFiles 1 numRows 50 rawDataSize 22050 - totalSize 2114 + totalSize 2147 #### A masked pattern was here #### # Storage Information @@ -731,7 +731,7 @@ Partition Parameters: numFiles 4 numRows 50 rawDataSize 21955 - totalSize 5322 + totalSize 5394 #### A masked pattern was here #### # Storage Information @@ -772,7 +772,7 @@ Partition Parameters: numFiles 4 numRows 50 rawDataSize 22043 - totalSize 5314 + totalSize 5388 #### A masked pattern was here #### # Storage Information @@ -825,7 +825,7 @@ Partition Parameters: numFiles 4 numRows 50 rawDataSize 21955 - totalSize 5322 + totalSize 5394 #### A masked pattern was here #### # Storage Information @@ -866,7 +866,7 @@ 
Partition Parameters: numFiles 4 numRows 50 rawDataSize 22043 - totalSize 5314 + totalSize 5388 #### A masked pattern was here #### # Storage Information @@ -968,7 +968,7 @@ Partition Parameters: numFiles 4 numRows 50 rawDataSize 21955 - totalSize 5322 + totalSize 5394 #### A masked pattern was here #### # Storage Information @@ -1009,7 +1009,7 @@ Partition Parameters: numFiles 4 numRows 50 rawDataSize 22043 - totalSize 5314 + totalSize 5388 #### A masked pattern was here #### # Storage Information @@ -1117,7 +1117,7 @@ Partition Parameters: numFiles 1 numRows 50 rawDataSize 21950 - totalSize 2099 + totalSize 2134 #### A masked pattern was here #### # Storage Information @@ -1170,7 +1170,7 @@ Partition Parameters: numFiles 1 numRows 50 rawDataSize 21950 - totalSize 2099 + totalSize 2134 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/llap/orc_llap_counters.q.out b/ql/src/test/results/clientpositive/llap/orc_llap_counters.q.out index 435e3fc..c4fe46e 100644 --- a/ql/src/test/results/clientpositive/llap/orc_llap_counters.q.out +++ b/ql/src/test/results/clientpositive/llap/orc_llap_counters.q.out @@ -233,7 +233,7 @@ Table Parameters: orc.bloom.filter.columns * orc.row.index.stride 1000 rawDataSize 1139514 - totalSize 55376 + totalSize 55453 #### A masked pattern was here #### # Storage Information @@ -251,7 +251,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n1 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 16673 + HDFS_BYTES_READ: 16676 HDFS_BYTES_WRITTEN: 104 HDFS_READ_OPS: 7 HDFS_LARGE_READ_OPS: 0 diff --git a/ql/src/test/results/clientpositive/llap/orc_llap_counters1.q.out b/ql/src/test/results/clientpositive/llap/orc_llap_counters1.q.out index c078326..6dec42f 100644 --- a/ql/src/test/results/clientpositive/llap/orc_llap_counters1.q.out +++ b/ql/src/test/results/clientpositive/llap/orc_llap_counters1.q.out @@ -233,7 +233,7 @@ Table Parameters: orc.bloom.filter.columns * orc.row.index.stride 1000 rawDataSize 1139514 - totalSize 55376 + totalSize 55453 #### A masked pattern was here #### # Storage Information @@ -251,7 +251,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 17728 + HDFS_BYTES_READ: 17731 HDFS_BYTES_WRITTEN: 104 HDFS_READ_OPS: 8 HDFS_LARGE_READ_OPS: 0 diff --git a/ql/src/test/results/clientpositive/llap/orc_merge1.q.out b/ql/src/test/results/clientpositive/llap/orc_merge1.q.out index f5d94f9..7189964 100644 --- a/ql/src/test/results/clientpositive/llap/orc_merge1.q.out +++ b/ql/src/test/results/clientpositive/llap/orc_merge1.q.out @@ -162,12 +162,12 @@ POSTHOOK: Lineage: orcfile_merge1_n1 PARTITION(ds=1,part=0).value SIMPLE [(src)s POSTHOOK: Lineage: orcfile_merge1_n1 PARTITION(ds=1,part=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: orcfile_merge1_n1 PARTITION(ds=1,part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] Found 6 items --rw-r--r-- 3 ### USER ### ### GROUP ### 543 ### HDFS DATE ### hdfs://### HDFS PATH ### --rw-r--r-- 3 ### USER ### ### GROUP ### 550 ### HDFS DATE ### hdfs://### HDFS PATH ### --rw-r--r-- 3 ### USER ### ### GROUP ### 549 ### HDFS DATE ### hdfs://### HDFS PATH ### --rw-r--r-- 3 ### USER ### ### GROUP ### 485 ### HDFS DATE ### hdfs://### HDFS PATH ### --rw-r--r-- 3 ### USER ### ### GROUP ### 542 ### HDFS DATE ### hdfs://### HDFS PATH ### --rw-r--r-- 3 
### USER ### ### GROUP ### 467 ### HDFS DATE ### hdfs://### HDFS PATH ### +-rw-r--r-- 3 ### USER ### ### GROUP ### 555 ### HDFS DATE ### hdfs://### HDFS PATH ### +-rw-r--r-- 3 ### USER ### ### GROUP ### 562 ### HDFS DATE ### hdfs://### HDFS PATH ### +-rw-r--r-- 3 ### USER ### ### GROUP ### 561 ### HDFS DATE ### hdfs://### HDFS PATH ### +-rw-r--r-- 3 ### USER ### ### GROUP ### 496 ### HDFS DATE ### hdfs://### HDFS PATH ### +-rw-r--r-- 3 ### USER ### ### GROUP ### 554 ### HDFS DATE ### hdfs://### HDFS PATH ### +-rw-r--r-- 3 ### USER ### ### GROUP ### 478 ### HDFS DATE ### hdfs://### HDFS PATH ### PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE orcfile_merge1b_n1 PARTITION (ds='1', part) SELECT key, value, PMOD(HASH(key), 2) as part @@ -340,7 +340,7 @@ POSTHOOK: Lineage: orcfile_merge1b_n1 PARTITION(ds=1,part=0).value SIMPLE [(src) POSTHOOK: Lineage: orcfile_merge1b_n1 PARTITION(ds=1,part=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: orcfile_merge1b_n1 PARTITION(ds=1,part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] Found 1 items --rw-rw-rw- 3 ### USER ### ### GROUP ### 1344 ### HDFS DATE ### hdfs://### HDFS PATH ### +-rw-rw-rw- 3 ### USER ### ### GROUP ### 1360 ### HDFS DATE ### hdfs://### HDFS PATH ### PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE orcfile_merge1c_n1 PARTITION (ds='1', part) SELECT key, value, PMOD(HASH(key), 2) as part @@ -505,7 +505,7 @@ POSTHOOK: Lineage: orcfile_merge1c_n1 PARTITION(ds=1,part=0).value SIMPLE [(src) POSTHOOK: Lineage: orcfile_merge1c_n1 PARTITION(ds=1,part=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: orcfile_merge1c_n1 PARTITION(ds=1,part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] Found 1 items --rw-rw-rw- 3 ### USER ### ### GROUP ### 2421 ### HDFS DATE ### hdfs://### HDFS PATH ### +-rw-rw-rw- 3 ### USER ### ### GROUP ### 2461 ### HDFS DATE ### hdfs://### HDFS PATH ### PREHOOK: query: SELECT SUM(HASH(c)) FROM ( SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM orcfile_merge1_n1 WHERE ds='1' diff --git a/ql/src/test/results/clientpositive/llap/orc_merge10.q.out b/ql/src/test/results/clientpositive/llap/orc_merge10.q.out index 32a4306..0f9bb2d 100644 --- a/ql/src/test/results/clientpositive/llap/orc_merge10.q.out +++ b/ql/src/test/results/clientpositive/llap/orc_merge10.q.out @@ -162,9 +162,9 @@ POSTHOOK: Lineage: orcfile_merge1 PARTITION(ds=1,part=0).value SIMPLE [(src)src. 
POSTHOOK: Lineage: orcfile_merge1 PARTITION(ds=1,part=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: orcfile_merge1 PARTITION(ds=1,part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] Found 3 items --rw-r--r-- 3 ### USER ### ### GROUP ### 933 ### HDFS DATE ### hdfs://### HDFS PATH ### --rw-r--r-- 3 ### USER ### ### GROUP ### 861 ### HDFS DATE ### hdfs://### HDFS PATH ### --rw-r--r-- 3 ### USER ### ### GROUP ### 842 ### HDFS DATE ### hdfs://### HDFS PATH ### +-rw-r--r-- 3 ### USER ### ### GROUP ### 947 ### HDFS DATE ### hdfs://### HDFS PATH ### +-rw-r--r-- 3 ### USER ### ### GROUP ### 875 ### HDFS DATE ### hdfs://### HDFS PATH ### +-rw-r--r-- 3 ### USER ### ### GROUP ### 856 ### HDFS DATE ### hdfs://### HDFS PATH ### PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE orcfile_merge1b PARTITION (ds='1', part) SELECT key, value, PMOD(HASH(key), 2) as part @@ -337,7 +337,7 @@ POSTHOOK: Lineage: orcfile_merge1b PARTITION(ds=1,part=0).value SIMPLE [(src)src POSTHOOK: Lineage: orcfile_merge1b PARTITION(ds=1,part=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: orcfile_merge1b PARTITION(ds=1,part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] Found 1 items --rw-rw-rw- 3 ### USER ### ### GROUP ### 1740 ### HDFS DATE ### hdfs://### HDFS PATH ### +-rw-rw-rw- 3 ### USER ### ### GROUP ### 1754 ### HDFS DATE ### hdfs://### HDFS PATH ### PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE orcfile_merge1c PARTITION (ds='1', part) SELECT key, value, PMOD(HASH(key), 2) as part @@ -502,7 +502,7 @@ POSTHOOK: Lineage: orcfile_merge1c PARTITION(ds=1,part=0).value SIMPLE [(src)src POSTHOOK: Lineage: orcfile_merge1c PARTITION(ds=1,part=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: orcfile_merge1c PARTITION(ds=1,part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] Found 1 items --rw-rw-rw- 3 ### USER ### ### GROUP ### 2384 ### HDFS DATE ### hdfs://### HDFS PATH ### +-rw-rw-rw- 3 ### USER ### ### GROUP ### 2409 ### HDFS DATE ### hdfs://### HDFS PATH ### PREHOOK: query: SELECT SUM(HASH(c)) FROM ( SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM orcfile_merge1 WHERE ds='1' @@ -605,7 +605,7 @@ POSTHOOK: type: ALTER_PARTITION_MERGE POSTHOOK: Input: default@orcfile_merge1 POSTHOOK: Output: default@orcfile_merge1@ds=1/part=0 Found 1 items --rw-rw-rw- 3 ### USER ### ### GROUP ### 2384 ### HDFS DATE ### hdfs://### HDFS PATH ### +-rw-rw-rw- 3 ### USER ### ### GROUP ### 2409 ### HDFS DATE ### hdfs://### HDFS PATH ### PREHOOK: query: SELECT SUM(HASH(c)) FROM ( SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM orcfile_merge1c WHERE ds='1' @@ -686,21 +686,21 @@ Type: struct<key:int,value:string> Stripe Statistics: Stripe 1: Column 0: count: 90 hasNull: false - Column 1: count: 90 hasNull: false min: 0 max: 495 sum: 22736 - Column 2: count: 90 hasNull: false min: val_0 max: val_86 sum: 612 + Column 1: count: 90 hasNull: false bytesOnDisk: 185 min: 0 max: 495 sum: 22736 + Column 2: count: 90 hasNull: false bytesOnDisk: 428 min: val_0 max: val_86 sum: 612 Stripe 2: Column 0: count: 78 hasNull: false - Column 1: count: 78 hasNull: false min: 0 max: 497 sum: 18371 - Column 2: count: 78 hasNull: false min: val_0 max: val_95 sum: 529 + Column 1: count: 78 hasNull: false bytesOnDisk: 161 min: 0 max: 497 sum: 18371 + Column 2: count: 78 hasNull: false bytesOnDisk: 380 min: val_0 max: val_95 sum: 529
Stripe 3: Column 0: count: 74 hasNull: false - Column 1: count: 74 hasNull: false min: 2 max: 493 sum: 19663 - Column 2: count: 74 hasNull: false min: val_105 max: val_97 sum: 505 + Column 1: count: 74 hasNull: false bytesOnDisk: 153 min: 2 max: 493 sum: 19663 + Column 2: count: 74 hasNull: false bytesOnDisk: 363 min: val_105 max: val_97 sum: 505 File Statistics: Column 0: count: 242 hasNull: false - Column 1: count: 242 hasNull: false min: 0 max: 497 sum: 60770 - Column 2: count: 242 hasNull: false min: val_0 max: val_97 sum: 1646 + Column 1: count: 242 hasNull: false bytesOnDisk: 499 min: 0 max: 497 sum: 60770 + Column 2: count: 242 hasNull: false bytesOnDisk: 1171 min: val_0 max: val_97 sum: 1646 Stripes: Stripe: offset: 3 data: 613 rows: 90 tail: 61 index: 76 @@ -752,7 +752,7 @@ Stripes: Row group indices for column 2: Entry 0: count: 74 hasNull: false min: val_105 max: val_97 sum: 505 positions: 0,0,0,0,0 -File length: 2384 bytes +File length: 2409 bytes Padding length: 0 bytes Padding ratio: 0% ________________________________________________________________________________________________________________________ @@ -775,21 +775,21 @@ Type: struct<key:int,value:string> Stripe Statistics: Stripe 1: Column 0: count: 90 hasNull: false - Column 1: count: 90 hasNull: false min: 0 max: 495 sum: 22736 - Column 2: count: 90 hasNull: false min: val_0 max: val_86 sum: 612 + Column 1: count: 90 hasNull: false bytesOnDisk: 185 min: 0 max: 495 sum: 22736 + Column 2: count: 90 hasNull: false bytesOnDisk: 428 min: val_0 max: val_86 sum: 612 Stripe 2: Column 0: count: 78 hasNull: false - Column 1: count: 78 hasNull: false min: 0 max: 497 sum: 18371 - Column 2: count: 78 hasNull: false min: val_0 max: val_95 sum: 529 + Column 1: count: 78 hasNull: false bytesOnDisk: 161 min: 0 max: 497 sum: 18371 + Column 2: count: 78 hasNull: false bytesOnDisk: 380 min: val_0 max: val_95 sum: 529 Stripe 3: Column 0: count: 74 hasNull: false - Column 1: count: 74 hasNull: false min: 2 max: 493 sum: 19663 - Column 2: count: 74 hasNull: false min: val_105 max: val_97 sum: 505 + Column 1: count: 74 hasNull: false bytesOnDisk: 153 min: 2 max: 493 sum: 19663 + Column 2: count: 74 hasNull: false bytesOnDisk: 363 min: val_105 max: val_97 sum: 505 File Statistics: Column 0: count: 242 hasNull: false - Column 1: count: 242 hasNull: false min: 0 max: 497 sum: 60770 - Column 2: count: 242 hasNull: false min: val_0 max: val_97 sum: 1646 + Column 1: count: 242 hasNull: false bytesOnDisk: 499 min: 0 max: 497 sum: 60770 + Column 2: count: 242 hasNull: false bytesOnDisk: 1171 min: val_0 max: val_97 sum: 1646 Stripes: Stripe: offset: 3 data: 613 rows: 90 tail: 61 index: 76 @@ -841,7 +841,7 @@ Stripes: Row group indices for column 2: Entry 0: count: 74 hasNull: false min: val_105 max: val_97 sum: 505 positions: 0,0,0,0,0 -File length: 2384 bytes +File length: 2409 bytes Padding length: 0 bytes Padding ratio: 0% ________________________________________________________________________________________________________________________ diff --git a/ql/src/test/results/clientpositive/llap/orc_merge11.q.out b/ql/src/test/results/clientpositive/llap/orc_merge11.q.out index a4ec749..1b2ddd3 100644 --- a/ql/src/test/results/clientpositive/llap/orc_merge11.q.out +++ b/ql/src/test/results/clientpositive/llap/orc_merge11.q.out @@ -81,19 +81,19 @@ Type: struct= 0.0D) (type: boolean) - Statistics: Num rows: 36 Data size: 6383 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 37 Data size: 6562 Basic stats: COMPLETE Column stats: NONE Select Operator - Statistics:
Num rows: 36 Data size: 6383 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 37 Data size: 6562 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count() mode: hash @@ -490,19 +490,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: tab1_n6 - Statistics: Num rows: 110 Data size: 19504 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 111 Data size: 19688 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 105 Data size: 18617 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 106 Data size: 18801 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 105 Data size: 18617 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 106 Data size: 18801 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 105 Data size: 18617 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 106 Data size: 18801 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: may be used (ACID table) Map 4 @@ -533,7 +533,7 @@ STAGE PLANS: keys: 0 _col0 (type: string) 1 _col0 (type: string) - Statistics: Num rows: 115 Data size: 20478 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 116 Data size: 20681 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count() mode: hash @@ -634,11 +634,11 @@ STAGE PLANS: Map Operator Tree: TableScan alias: tab2_n5 - Statistics: Num rows: 110 Data size: 19504 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 111 Data size: 19688 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 110 Data size: 19504 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 111 Data size: 19688 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: max(key) mode: hash @@ -705,38 +705,38 @@ STAGE PLANS: Map Operator Tree: TableScan alias: tab1_n6 - Statistics: Num rows: 110 Data size: 19504 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 111 Data size: 19688 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 105 Data size: 18617 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 106 Data size: 18801 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 105 Data size: 18617 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 106 Data size: 18801 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 105 Data size: 18617 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 106 Data size: 18801 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: may be used (ACID table) Map 4 Map Operator Tree: TableScan alias: tab2_n5 - Statistics: Num rows: 110 Data size: 19504 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 111 Data size: 19688 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 
105 Data size: 18617 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 106 Data size: 18801 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 105 Data size: 18617 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 106 Data size: 18801 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 105 Data size: 18617 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 106 Data size: 18801 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: may be used (ACID table) Reducer 2 @@ -748,7 +748,7 @@ STAGE PLANS: keys: 0 _col0 (type: string) 1 _col0 (type: string) - Statistics: Num rows: 115 Data size: 20478 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 116 Data size: 20681 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count() mode: hash diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_stats.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_stats.q.out index eccb983..5c45776 100644 --- a/ql/src/test/results/clientpositive/llap/schema_evol_stats.q.out +++ b/ql/src/test/results/clientpositive/llap/schema_evol_stats.q.out @@ -290,7 +290,7 @@ Table Parameters: numPartitions 2 numRows 8 rawDataSize 1116 - totalSize 819 + totalSize 848 #### A masked pattern was here #### # Storage Information @@ -327,7 +327,7 @@ Partition Parameters: numFiles 1 numRows 4 rawDataSize 384 - totalSize 313 + totalSize 323 #### A masked pattern was here #### # Storage Information @@ -366,7 +366,7 @@ Partition Parameters: numFiles 1 numRows 4 rawDataSize 732 - totalSize 506 + totalSize 525 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_complex_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_complex_llap_io.q.out index 5052fe6..ca2bfb7 100644 --- a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_complex_llap_io.q.out +++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_complex_llap_io.q.out @@ -92,8 +92,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -336,8 +335,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -497,8 +495,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: 
false @@ -745,8 +742,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_primitive_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_primitive_llap_io.q.out index df136bf..8765301 100644 --- a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_primitive_llap_io.q.out +++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_primitive_llap_io.q.out @@ -182,8 +182,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -513,8 +512,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -775,8 +773,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -992,8 +989,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1198,7 +1194,7 @@ STAGE PLANS: Statistics: Num rows: 5 Data size: 3595 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:insert_num:int, 1:c1:char(12), 2:c2:char(25), 3:c3:varchar(25), 4:c4:varchar(10), 5:c5:decimal(12,4), 6:c6:decimal(20,10), 7:b:string, 8:part:int, 9:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>] + vectorizationSchemaColumns: [0:insert_num:int, 1:c1:char(12), 2:c2:char(25), 3:c3:varchar(25), 4:c4:varchar(10), 5:c5:decimal(12,4)/DECIMAL_64, 6:c6:decimal(20,10), 7:b:string, 8:part:int, 9:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>] Select Operator expressions: insert_num (type: int), part (type: int), c1 (type: char(12)), c2 (type: char(25)), c3 (type: varchar(25)), c4 (type: varchar(10)), c5 (type: decimal(12,4)), c6 (type: decimal(20,10)), b (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 @@ -1223,8 +1219,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - 
vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1232,7 +1227,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 8 includeColumns: [0, 1, 2, 3, 4, 5, 6, 7] - dataColumns: insert_num:int, c1:char(12), c2:char(25), c3:varchar(25), c4:varchar(10), c5:decimal(12,4), c6:decimal(20,10), b:string + dataColumns: insert_num:int, c1:char(12), c2:char(25), c3:varchar(25), c4:varchar(10), c5:decimal(12,4)/DECIMAL_64, c6:decimal(20,10), b:string partitionColumnCount: 1 partitionColumns: part:int scratchColumnTypeNames: [] diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_llap_io.q.out index 7259b33..36b53e5 100644 --- a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_llap_io.q.out +++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_llap_io.q.out @@ -88,8 +88,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -200,8 +199,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -314,8 +312,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -439,8 +436,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -590,8 +586,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -736,8 +731,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false 
usesVectorUDFAdaptor: false @@ -875,8 +869,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1028,8 +1021,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1158,8 +1150,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_table_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_table_llap_io.q.out index 8f83622..867e134 100644 --- a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_table_llap_io.q.out +++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_table_llap_io.q.out @@ -88,8 +88,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -194,8 +193,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -293,8 +291,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -400,8 +397,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -501,8 +497,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - 
featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -608,8 +603,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -716,8 +710,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -879,8 +872,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1005,8 +997,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1134,8 +1125,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1255,8 +1245,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1377,8 +1366,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1556,8 +1544,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1726,8 +1713,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: 
hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_char_varchar_1.q.out b/ql/src/test/results/clientpositive/llap/vector_char_varchar_1.q.out index 1a32227..5a23539 100644 --- a/ql/src/test/results/clientpositive/llap/vector_char_varchar_1.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_char_varchar_1.q.out @@ -83,8 +83,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: true @@ -231,8 +230,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: true diff --git a/ql/src/test/results/clientpositive/llap/vector_create_struct_table.q.out b/ql/src/test/results/clientpositive/llap/vector_create_struct_table.q.out index c08154e..f312244 100644 --- a/ql/src/test/results/clientpositive/llap/vector_create_struct_table.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_create_struct_table.q.out @@ -79,8 +79,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -191,8 +190,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -303,8 +301,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out index 5e835cd..3170625 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out @@ -62,7 +62,7 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - 
vectorizationSchemaColumns: [0:dec:decimal(10,0), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:dec:decimal(10,0)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: dec (type: decimal(10,0)) outputColumnNames: _col0 @@ -86,8 +86,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -95,7 +95,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: dec:decimal(10,0) + dataColumns: dec:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -178,7 +178,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:dec:decimal(10,0), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:dec:decimal(10,0)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: dec (type: decimal(10,0)) outputColumnNames: _col0 @@ -203,8 +203,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -212,7 +211,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: dec:decimal(10,0) + dataColumns: dec:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out index 902d137..4f535ff 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out @@ -111,8 +111,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -291,8 +291,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -465,7 +465,7 @@ STAGE PLANS: Statistics: Num rows: 12289 Data size: 2662128 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:cdouble:double, 1:cdecimal1:decimal(11,5), 2:cdecimal2:decimal(16,0), 3:cint:int, 4:ROW__ID:struct] + vectorizationSchemaColumns: [0:cdouble:double, 1:cdecimal1:decimal(11,5)/DECIMAL_64, 2:cdecimal2:decimal(16,0)/DECIMAL_64, 3:cint:int, 4:ROW__ID:struct] Select Operator expressions: cdecimal1 (type: decimal(11,5)), cdecimal2 (type: decimal(16,0)), cint (type: int) outputColumnNames: 
cdecimal1, cdecimal2, cint @@ -477,7 +477,7 @@ STAGE PLANS: Group By Operator aggregations: count(cdecimal1), max(cdecimal1), min(cdecimal1), sum(cdecimal1), count(cdecimal2), max(cdecimal2), min(cdecimal2), sum(cdecimal2), count() Group By Vectorization: - aggregators: VectorUDAFCount(col 1:decimal(11,5)) -> bigint, VectorUDAFMaxDecimal(col 1:decimal(11,5)) -> decimal(11,5), VectorUDAFMinDecimal(col 1:decimal(11,5)) -> decimal(11,5), VectorUDAFSumDecimal(col 1:decimal(11,5)) -> decimal(21,5), VectorUDAFCount(col 2:decimal(16,0)) -> bigint, VectorUDAFMaxDecimal(col 2:decimal(16,0)) -> decimal(16,0), VectorUDAFMinDecimal(col 2:decimal(16,0)) -> decimal(16,0), VectorUDAFSumDecimal(col 2:decimal(16,0)) -> decimal(26,0), VectorUDAFCountStar(*) -> bigint + aggregators: VectorUDAFCount(col 1:decimal(11,5)/DECIMAL_64) -> bigint, VectorUDAFMaxDecimal64(col 1:decimal(11,5)/DECIMAL_64) -> decimal(11,5)/DECIMAL_64, VectorUDAFMinDecimal64(col 1:decimal(11,5)/DECIMAL_64) -> decimal(11,5)/DECIMAL_64, VectorUDAFSumDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> decimal(21,5), VectorUDAFCount(col 2:decimal(16,0)/DECIMAL_64) -> bigint, VectorUDAFMaxDecimal64(col 2:decimal(16,0)/DECIMAL_64) -> decimal(16,0)/DECIMAL_64, VectorUDAFMinDecimal64(col 2:decimal(16,0)/DECIMAL_64) -> decimal(16,0)/DECIMAL_64, VectorUDAFSumDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> decimal(26,0), VectorUDAFCountStar(*) -> bigint className: VectorGroupByOperator groupByMode: HASH keyExpressions: col 3:int @@ -506,8 +506,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -515,7 +514,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 4 includeColumns: [1, 2, 3] - dataColumns: cdouble:double, cdecimal1:decimal(11,5), cdecimal2:decimal(16,0), cint:int + dataColumns: cdouble:double, cdecimal1:decimal(11,5)/DECIMAL_64, cdecimal2:decimal(16,0)/DECIMAL_64, cint:int partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -664,20 +663,20 @@ STAGE PLANS: Statistics: Num rows: 12289 Data size: 2662128 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:cdouble:double, 1:cdecimal1:decimal(11,5), 2:cdecimal2:decimal(16,0), 3:cint:int, 4:ROW__ID:struct] + vectorizationSchemaColumns: [0:cdouble:double, 1:cdecimal1:decimal(11,5)/DECIMAL_64, 2:cdecimal2:decimal(16,0)/DECIMAL_64, 3:cint:int, 4:ROW__ID:struct] Select Operator expressions: cint (type: int), cdecimal1 (type: decimal(11,5)), cdecimal2 (type: decimal(16,0)), UDFToDouble(cdecimal1) (type: double), (UDFToDouble(cdecimal1) * UDFToDouble(cdecimal1)) (type: double), UDFToDouble(cdecimal2) (type: double), (UDFToDouble(cdecimal2) * UDFToDouble(cdecimal2)) (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [3, 1, 2, 5, 8, 6, 10] - selectExpressions: CastDecimalToDouble(col 1:decimal(11,5)) -> 5:double, DoubleColMultiplyDoubleColumn(col 6:double, col 7:double)(children: CastDecimalToDouble(col 1:decimal(11,5)) -> 6:double, CastDecimalToDouble(col 1:decimal(11,5)) -> 7:double) -> 8:double, CastDecimalToDouble(col 2:decimal(16,0)) -> 6:double, 
DoubleColMultiplyDoubleColumn(col 7:double, col 9:double)(children: CastDecimalToDouble(col 2:decimal(16,0)) -> 7:double, CastDecimalToDouble(col 2:decimal(16,0)) -> 9:double) -> 10:double + projectedOutputColumnNums: [3, 1, 2, 6, 9, 7, 12] + selectExpressions: CastDecimalToDouble(col 5:decimal(11,5))(children: ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 5:decimal(11,5)) -> 6:double, DoubleColMultiplyDoubleColumn(col 7:double, col 8:double)(children: CastDecimalToDouble(col 5:decimal(11,5))(children: ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 5:decimal(11,5)) -> 7:double, CastDecimalToDouble(col 5:decimal(11,5))(children: ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 5:decimal(11,5)) -> 8:double) -> 9:double, CastDecimalToDouble(col 10:decimal(16,0))(children: ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 10:decimal(16,0)) -> 7:double, DoubleColMultiplyDoubleColumn(col 8:double, col 11:double)(children: CastDecimalToDouble(col 10:decimal(16,0))(children: ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 10:decimal(16,0)) -> 8:double, CastDecimalToDouble(col 10:decimal(16,0))(children: ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 10:decimal(16,0)) -> 11:double) -> 12:double Statistics: Num rows: 12289 Data size: 2662128 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(_col1), max(_col1), min(_col1), sum(_col1), sum(_col4), sum(_col3), count(_col2), max(_col2), min(_col2), sum(_col2), sum(_col6), sum(_col5), count() Group By Vectorization: - aggregators: VectorUDAFCount(col 1:decimal(11,5)) -> bigint, VectorUDAFMaxDecimal(col 1:decimal(11,5)) -> decimal(11,5), VectorUDAFMinDecimal(col 1:decimal(11,5)) -> decimal(11,5), VectorUDAFSumDecimal(col 1:decimal(11,5)) -> decimal(21,5), VectorUDAFSumDouble(col 8:double) -> double, VectorUDAFSumDouble(col 5:double) -> double, VectorUDAFCount(col 2:decimal(16,0)) -> bigint, VectorUDAFMaxDecimal(col 2:decimal(16,0)) -> decimal(16,0), VectorUDAFMinDecimal(col 2:decimal(16,0)) -> decimal(16,0), VectorUDAFSumDecimal(col 2:decimal(16,0)) -> decimal(26,0), VectorUDAFSumDouble(col 10:double) -> double, VectorUDAFSumDouble(col 6:double) -> double, VectorUDAFCountStar(*) -> bigint + aggregators: VectorUDAFCount(col 1:decimal(11,5)/DECIMAL_64) -> bigint, VectorUDAFMaxDecimal64(col 1:decimal(11,5)/DECIMAL_64) -> decimal(11,5)/DECIMAL_64, VectorUDAFMinDecimal64(col 1:decimal(11,5)/DECIMAL_64) -> decimal(11,5)/DECIMAL_64, VectorUDAFSumDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> decimal(21,5), VectorUDAFSumDouble(col 9:double) -> double, VectorUDAFSumDouble(col 6:double) -> double, VectorUDAFCount(col 2:decimal(16,0)/DECIMAL_64) -> bigint, VectorUDAFMaxDecimal64(col 2:decimal(16,0)/DECIMAL_64) -> decimal(16,0)/DECIMAL_64, VectorUDAFMinDecimal64(col 2:decimal(16,0)/DECIMAL_64) -> decimal(16,0)/DECIMAL_64, VectorUDAFSumDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> decimal(26,0), VectorUDAFSumDouble(col 12:double) -> double, VectorUDAFSumDouble(col 7:double) -> double, VectorUDAFCountStar(*) -> bigint className: VectorGroupByOperator groupByMode: HASH keyExpressions: col 3:int @@ -706,8 +705,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: 
org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -715,9 +713,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 4 includeColumns: [1, 2, 3] - dataColumns: cdouble:double, cdecimal1:decimal(11,5), cdecimal2:decimal(16,0), cint:int + dataColumns: cdouble:double, cdecimal1:decimal(11,5)/DECIMAL_64, cdecimal2:decimal(16,0)/DECIMAL_64, cint:int partitionColumnCount: 0 - scratchColumnTypeNames: [double, double, double, double, double, double] + scratchColumnTypeNames: [decimal(11,5), double, double, double, double, decimal(16,0), double, double] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_cast.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_cast.q.out index 9e5885e..cff4513 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_cast.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_cast.q.out @@ -60,8 +60,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -184,8 +184,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out index 50e4305..5e7e8ca 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out @@ -612,8 +612,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1232,8 +1232,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_round.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_round.q.out index 6737052..eb4a588 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_round.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_round.q.out @@ -59,15 +59,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:dec:decimal(10,0), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:dec:decimal(10,0)/DECIMAL_64, 1:ROW__ID:struct] Select 
Operator expressions: dec (type: decimal(10,0)), round(dec, -1) (type: decimal(11,0)) outputColumnNames: _col0, _col1 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 2] - selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 0:decimal(10,0), decimalPlaces -1) -> 2:decimal(11,0) + projectedOutputColumnNums: [0, 3] + selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) -> 2:decimal(10,0)) -> 3:decimal(11,0) Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: decimal(10,0)) @@ -77,7 +77,7 @@ STAGE PLANS: keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2] + valueColumnNums: [3] Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(11,0)) Execution mode: vectorized, llap @@ -86,8 +86,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -95,9 +94,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: dec:decimal(10,0) + dataColumns: dec:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(11,0)] + scratchColumnTypeNames: [decimal(10,0), decimal(11,0)] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -177,22 +176,22 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:dec:decimal(10,0), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:dec:decimal(10,0)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: dec (type: decimal(10,0)), round(dec, -1) (type: decimal(11,0)) outputColumnNames: _col0, _col2 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 2] - selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 0:decimal(10,0), decimalPlaces -1) -> 2:decimal(11,0) + projectedOutputColumnNums: [0, 3] + selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) -> 2:decimal(10,0)) -> 3:decimal(11,0) Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col2 (type: decimal(11,0)) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2] + keyColumnNums: [3] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [0] @@ -204,8 +203,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: 
hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -213,9 +211,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: dec:decimal(10,0) + dataColumns: dec:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(11,0)] + scratchColumnTypeNames: [decimal(10,0), decimal(11,0)] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -585,15 +583,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:dec:decimal(10,0), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:dec:decimal(10,0)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: dec (type: decimal(10,0)), round(dec, -1) (type: decimal(11,0)) outputColumnNames: _col0, _col1 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 2] - selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 0:decimal(10,0), decimalPlaces -1) -> 2:decimal(11,0) + projectedOutputColumnNums: [0, 3] + selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) -> 2:decimal(10,0)) -> 3:decimal(11,0) Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: decimal(10,0)) @@ -603,7 +601,7 @@ STAGE PLANS: keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2] + valueColumnNums: [3] Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(11,0)) Execution mode: vectorized, llap @@ -611,8 +609,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -620,9 +618,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: dec:decimal(10,0) + dataColumns: dec:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(11,0)] + scratchColumnTypeNames: [decimal(10,0), decimal(11,0)] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -702,22 +700,22 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:dec:decimal(10,0), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:dec:decimal(10,0)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: dec (type: decimal(10,0)), round(dec, -1) (type: decimal(11,0)) outputColumnNames: _col0, _col2 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 2] - 
selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 0:decimal(10,0), decimalPlaces -1) -> 2:decimal(11,0) + projectedOutputColumnNums: [0, 3] + selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) -> 2:decimal(10,0)) -> 3:decimal(11,0) Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col2 (type: decimal(11,0)) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2] + keyColumnNums: [3] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [0] @@ -728,8 +726,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -737,9 +735,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: dec:decimal(10,0) + dataColumns: dec:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(11,0)] + scratchColumnTypeNames: [decimal(10,0), decimal(11,0)] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_udf2.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_udf2.q.out index 9e1c8d7..4c9b737 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_udf2.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_udf2.q.out @@ -86,12 +86,12 @@ STAGE PLANS: Statistics: Num rows: 39 Data size: 4032 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:decimal(14,5), 1:value:int, 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:key:decimal(14,5)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterDecimalColEqualDecimalScalar(col 0:decimal(14,5), val 10) + predicateExpression: FilterDecimal64ColEqualDecimal64Scalar(col 0:decimal(14,5)/DECIMAL_64, val 1000000) predicate: (key = 10) (type: boolean) Statistics: Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -118,8 +118,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -127,7 +127,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0] - dataColumns: key:decimal(14,5), value:int + dataColumns: key:decimal(14,5)/DECIMAL_64, value:int partitionColumnCount: 0 scratchColumnTypeNames: [double, double, double, double, double, double, double] @@ -195,12 +195,12 @@ STAGE PLANS: Statistics: Num rows: 39 Data size: 4188 Basic stats: COMPLETE Column stats: COMPLETE TableScan 
Vectorization: native: true - vectorizationSchemaColumns: [0:key:decimal(14,5), 1:value:int, 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:key:decimal(14,5)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterDecimalColEqualDecimalScalar(col 0:decimal(14,5), val 10) + predicateExpression: FilterDecimal64ColEqualDecimal64Scalar(col 0:decimal(14,5)/DECIMAL_64, val 1000000) predicate: (key = 10) (type: boolean) Statistics: Num rows: 2 Data size: 232 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -227,8 +227,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -236,7 +236,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: key:decimal(14,5), value:int + dataColumns: key:decimal(14,5)/DECIMAL_64, value:int partitionColumnCount: 0 scratchColumnTypeNames: [double, double, double, double, double, double, double, double] @@ -310,12 +310,12 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:decimal(14,5), 1:value:int, 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:key:decimal(14,5)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterDecimalColEqualDecimalScalar(col 0:decimal(14,5), val 10) + predicateExpression: FilterDecimal64ColEqualDecimal64Scalar(col 0:decimal(14,5)/DECIMAL_64, val 1000000) predicate: (key = 10) (type: boolean) Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -343,8 +343,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -352,7 +351,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0] - dataColumns: key:decimal(14,5), value:int + dataColumns: key:decimal(14,5)/DECIMAL_64, value:int partitionColumnCount: 0 scratchColumnTypeNames: [double, double, double, double, double, double, double] @@ -420,12 +419,12 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:decimal(14,5), 1:value:int, 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:key:decimal(14,5)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterDecimalColEqualDecimalScalar(col 0:decimal(14,5), val 10) + predicateExpression: FilterDecimal64ColEqualDecimal64Scalar(col 0:decimal(14,5)/DECIMAL_64, val 1000000) predicate: (key = 10) (type: boolean) Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -453,8 +452,7 @@ STAGE PLANS: enabled: true 
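A note on the predicate rewrites in vector_decimal_udf2.q.out above: the predicate (key = 10) that previously compiled to FilterDecimalColEqualDecimalScalar(col 0:decimal(14,5), val 10) now compiles to FilterDecimal64ColEqualDecimal64Scalar(col 0:decimal(14,5)/DECIMAL_64, val 1000000). A DECIMAL_64 column carries its unscaled value in a long, so the planner pre-scales the literal to the column's scale (10 at scale 5 is 10 * 10^5 = 1000000) and the per-row filter becomes a plain long comparison. Below is a minimal sketch of that scaling using only the JDK; the class and method names are illustrative, not Hive's.

```java
import java.math.BigDecimal;

// Illustrative sketch: how a decimal literal becomes the scaled long that
// a decimal64 filter compares against. Hive's actual code lives in its
// vectorization layer; this only demonstrates the arithmetic.
public class Decimal64ScalarSketch {
  static long toDecimal64(BigDecimal literal, int columnScale) {
    // setScale with no rounding mode throws if the literal does not fit
    // the column's scale exactly, which mirrors a plan-time check.
    return literal.setScale(columnScale).unscaledValue().longValueExact();
  }

  public static void main(String[] args) {
    // The "val 1000000" seen in the plans: literal 10 against decimal(14,5).
    System.out.println(toDecimal64(new BigDecimal("10"), 5)); // 1000000
  }
}
```

Scaling the scalar once at plan time is what makes the fast path cheap: every subsequent row is compared as a primitive long.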
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: true @@ -462,7 +460,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: key:decimal(14,5), value:int + dataColumns: key:decimal(14,5)/DECIMAL_64, value:int partitionColumnCount: 0 scratchColumnTypeNames: [double, double, double, double, double, double, double, double] diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby_cube1.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_cube1.q.out index 2ea9018..5c0d6bb 100644 --- a/ql/src/test/results/clientpositive/llap/vector_groupby_cube1.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_groupby_cube1.q.out @@ -84,8 +84,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -222,8 +221,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -386,8 +384,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -648,8 +645,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1038,8 +1034,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby_mapjoin.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_mapjoin.q.out index 98e6e54..7dc98c2 100644 --- a/ql/src/test/results/clientpositive/llap/vector_groupby_mapjoin.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_groupby_mapjoin.q.out @@ -61,8 +61,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - 
vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -129,8 +128,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby_sort_11.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_sort_11.q.out index b772e9a..ab29314 100644 --- a/ql/src/test/results/clientpositive/llap/vector_groupby_sort_11.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_groupby_sort_11.q.out @@ -98,8 +98,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -521,8 +520,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -706,8 +704,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -887,8 +884,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby_sort_8.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_sort_8.q.out index 74ad6ae..5e946c4 100644 --- a/ql/src/test/results/clientpositive/llap/vector_groupby_sort_8.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_groupby_sort_8.q.out @@ -107,8 +107,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_llap_text_1.q.out b/ql/src/test/results/clientpositive/llap/vector_llap_text_1.q.out index f7721b6..4cc23c4 100644 
--- a/ql/src/test/results/clientpositive/llap/vector_llap_text_1.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_llap_text_1.q.out @@ -179,8 +179,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -259,8 +258,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_map_order.q.out b/ql/src/test/results/clientpositive/llap/vector_map_order.q.out index 02fc5a0..238555c 100644 --- a/ql/src/test/results/clientpositive/llap/vector_map_order.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_map_order.q.out @@ -75,8 +75,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_mapjoin_reduce.q.out b/ql/src/test/results/clientpositive/llap/vector_mapjoin_reduce.q.out index 37821fb..db9c142 100644 --- a/ql/src/test/results/clientpositive/llap/vector_mapjoin_reduce.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_mapjoin_reduce.q.out @@ -67,8 +67,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -111,8 +110,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -166,8 +164,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -339,8 +336,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] 
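Most of the golden-file updates in this patch are the same two-line change seen throughout this file: the removal reason [DECIMAL_64 disabled because LLAP is enabled] disappears, and featureSupportInUse flips from [] to [DECIMAL_64] (for the ORC-backed tables, inputFormatFeatureSupport flips from [] to [DECIMAL_64] as well). The explain fields read as a negotiation: the features in use are those the input format advertises minus those the planner strips, and with the LLAP-specific strip removed, DECIMAL_64 now survives. A hedged sketch of that set arithmetic follows; the type and method names are stand-ins, not Hive's API.

```java
import java.util.EnumSet;

// Hypothetical sketch of the negotiation the explain output reflects:
// featureSupportInUse = advertised support minus planner-removed support.
// Names are stand-ins; Hive's real enum lives in its vectorization classes.
public class FeatureSupportSketch {
  enum Support { DECIMAL_64 }

  static EnumSet<Support> inUse(EnumSet<Support> advertised,
                                EnumSet<Support> removedByPlanner) {
    EnumSet<Support> result = EnumSet.copyOf(advertised);
    result.removeAll(removedByPlanner);
    return result;
  }

  public static void main(String[] args) {
    EnumSet<Support> advertised = EnumSet.of(Support.DECIMAL_64);
    // Before this patch the LLAP path stripped DECIMAL_64, leaving [].
    System.out.println(inUse(advertised, EnumSet.of(Support.DECIMAL_64)));
    // After: nothing is stripped, so [DECIMAL_64] stays in use.
    System.out.println(inUse(advertised, EnumSet.noneOf(Support.class)));
  }
}
```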
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -384,8 +380,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -440,8 +435,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_null_map.q.out b/ql/src/test/results/clientpositive/llap/vector_null_map.q.out index 666f7fd..5394cc6 100644 --- a/ql/src/test/results/clientpositive/llap/vector_null_map.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_null_map.q.out @@ -76,8 +76,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -149,8 +148,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: true diff --git a/ql/src/test/results/clientpositive/llap/vector_orc_merge_incompat_schema.q.out b/ql/src/test/results/clientpositive/llap/vector_orc_merge_incompat_schema.q.out index f1a4ea3..90e2103 100644 --- a/ql/src/test/results/clientpositive/llap/vector_orc_merge_incompat_schema.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_orc_merge_incompat_schema.q.out @@ -161,8 +161,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: true @@ -245,8 +244,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true diff --git a/ql/src/test/results/clientpositive/llap/vector_order_null.q.out b/ql/src/test/results/clientpositive/llap/vector_order_null.q.out index 08c57bd..cb4053e 100644 --- a/ql/src/test/results/clientpositive/llap/vector_order_null.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_order_null.q.out @@ -127,8 +127,7 
@@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -250,8 +249,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -373,8 +371,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -496,8 +493,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -619,8 +615,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -742,8 +737,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -865,8 +859,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -988,8 +981,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1111,8 +1103,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + 
featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1234,8 +1225,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1357,8 +1347,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_outer_reference_windowed.q.out b/ql/src/test/results/clientpositive/llap/vector_outer_reference_windowed.q.out index 89c14d5..2b0a1e7 100644 --- a/ql/src/test/results/clientpositive/llap/vector_outer_reference_windowed.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_outer_reference_windowed.q.out @@ -271,7 +271,7 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(15,2), 1:c2:decimal(15,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(15,2)/DECIMAL_64, 1:c2:decimal(15,2)/DECIMAL_64, 2:ROW__ID:struct] Select Operator expressions: c1 (type: decimal(15,2)) outputColumnNames: c1 @@ -283,7 +283,7 @@ STAGE PLANS: Group By Operator aggregations: sum(c1) Group By Vectorization: - aggregators: VectorUDAFSumDecimal(col 0:decimal(15,2)) -> decimal(25,2) + aggregators: VectorUDAFSumDecimal64ToDecimal(col 0:decimal(15,2)/DECIMAL_64) -> decimal(25,2) className: VectorGroupByOperator groupByMode: HASH native: false @@ -308,8 +308,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -317,7 +316,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0] - dataColumns: c1:decimal(15,2), c2:decimal(15,2) + dataColumns: c1:decimal(15,2)/DECIMAL_64, c2:decimal(15,2)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -489,7 +488,7 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(15,2), 1:c2:decimal(15,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(15,2)/DECIMAL_64, 1:c2:decimal(15,2)/DECIMAL_64, 2:ROW__ID:struct] Select Operator expressions: c1 (type: decimal(15,2)), c2 (type: decimal(15,2)) outputColumnNames: c1, c2 @@ -501,10 +500,10 @@ STAGE PLANS: Group By Operator aggregations: sum(c1) Group By Vectorization: - aggregators: VectorUDAFSumDecimal(col 0:decimal(15,2)) -> decimal(25,2) + aggregators: VectorUDAFSumDecimal64ToDecimal(col 0:decimal(15,2)/DECIMAL_64) -> decimal(25,2) className: VectorGroupByOperator groupByMode: 
HASH - keyExpressions: col 0:decimal(15,2), col 1:decimal(15,2) + keyExpressions: ConvertDecimal64ToDecimal(col 0:decimal(15,2)/DECIMAL_64) -> 3:decimal(15,2), ConvertDecimal64ToDecimal(col 1:decimal(15,2)/DECIMAL_64) -> 4:decimal(15,2) native: false vectorProcessingMode: HASH projectedOutputColumnNums: [0] @@ -530,8 +529,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -539,9 +537,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: c1:decimal(15,2), c2:decimal(15,2) + dataColumns: c1:decimal(15,2)/DECIMAL_64, c2:decimal(15,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(15,2), decimal(15,2)] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -726,12 +724,12 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(15,2), 1:c2:decimal(15,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(15,2)/DECIMAL_64, 1:c2:decimal(15,2)/DECIMAL_64, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:decimal(15,2)) + predicateExpression: SelectColumnIsNotNull(col 3:decimal(15,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,2)/DECIMAL_64) -> 3:decimal(15,2)) predicate: c1 is not null (type: boolean) Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -760,8 +758,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -769,9 +766,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: c1:decimal(15,2), c2:decimal(15,2) + dataColumns: c1:decimal(15,2)/DECIMAL_64, c2:decimal(15,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(15,2)] Map 5 Map Operator Tree: TableScan @@ -779,12 +776,12 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(15,2), 1:c2:decimal(15,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(15,2)/DECIMAL_64, 1:c2:decimal(15,2)/DECIMAL_64, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:decimal(15,2)) + predicateExpression: SelectColumnIsNotNull(col 3:decimal(15,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,2)/DECIMAL_64) -> 3:decimal(15,2)) predicate: c1 is not null (type: boolean) Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -812,8 +809,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: 
hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -821,9 +817,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0] - dataColumns: c1:decimal(15,2), c2:decimal(15,2) + dataColumns: c1:decimal(15,2)/DECIMAL_64, c2:decimal(15,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(15,2)] Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -1035,12 +1031,12 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(15,2), 1:c2:decimal(15,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(15,2)/DECIMAL_64, 1:c2:decimal(15,2)/DECIMAL_64, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:decimal(15,2)) + predicateExpression: SelectColumnIsNotNull(col 3:decimal(15,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,2)/DECIMAL_64) -> 3:decimal(15,2)) predicate: c1 is not null (type: boolean) Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -1068,8 +1064,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1077,9 +1072,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0] - dataColumns: c1:decimal(15,2), c2:decimal(15,2) + dataColumns: c1:decimal(15,2)/DECIMAL_64, c2:decimal(15,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(15,2)] Map 5 Map Operator Tree: TableScan @@ -1087,12 +1082,12 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(15,2), 1:c2:decimal(15,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(15,2)/DECIMAL_64, 1:c2:decimal(15,2)/DECIMAL_64, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:decimal(15,2)) + predicateExpression: SelectColumnIsNotNull(col 3:decimal(15,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,2)/DECIMAL_64) -> 3:decimal(15,2)) predicate: c1 is not null (type: boolean) Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -1121,8 +1116,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1130,9 +1124,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 
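The vector_outer_reference_windowed.q.out hunks around this point show the fallback half of the mechanism: where an expression has no decimal64 specialization (SelectColumnIsNotNull here, and the group-by key expressions above), the plan first materializes the column through ConvertDecimal64ToDecimal into a scratch column, which is why scratchColumnTypeNames grows entries such as decimal(15,2). Conceptually the conversion just expands scaled longs into full decimal values; the following self-contained sketch uses BigDecimal in place of Hive's column vector classes.

```java
import java.math.BigDecimal;

// Conceptual sketch of a ConvertDecimal64ToDecimal step: expand a
// long-backed decimal64 vector into full decimal values in a scratch
// column. Hive uses Decimal64ColumnVector/DecimalColumnVector; plain
// arrays keep this example self-contained.
public class ConvertDecimal64Sketch {
  static BigDecimal[] convert(long[] decimal64, int scale) {
    BigDecimal[] scratch = new BigDecimal[decimal64.length];
    for (int i = 0; i < decimal64.length; i++) {
      scratch[i] = BigDecimal.valueOf(decimal64[i], scale);
    }
    return scratch;
  }

  public static void main(String[] args) {
    // decimal(15,2) values 1.00 and 12345.67 stored as scaled longs.
    for (BigDecimal d : convert(new long[] {100L, 1234567L}, 2)) {
      System.out.println(d); // 1.00 then 12345.67
    }
  }
}
```

The added scratch columns are the cost of mixing fast and slow paths; operators that do have decimal64 forms, like the filters and UDAFs rewritten above, avoid the conversion entirely.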
includeColumns: [0, 1] - dataColumns: c1:decimal(15,2), c2:decimal(15,2) + dataColumns: c1:decimal(15,2)/DECIMAL_64, c2:decimal(15,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(15,2)] Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -1343,12 +1337,12 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(15,2), 1:c2:decimal(15,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(15,2)/DECIMAL_64, 1:c2:decimal(15,2)/DECIMAL_64, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:decimal(15,2)) + predicateExpression: SelectColumnIsNotNull(col 3:decimal(15,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,2)/DECIMAL_64) -> 3:decimal(15,2)) predicate: c1 is not null (type: boolean) Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -1377,8 +1371,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1386,9 +1379,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: c1:decimal(15,2), c2:decimal(15,2) + dataColumns: c1:decimal(15,2)/DECIMAL_64, c2:decimal(15,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(15,2)] Map 4 Map Operator Tree: TableScan @@ -1396,12 +1389,12 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(15,2), 1:c2:decimal(15,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(15,2)/DECIMAL_64, 1:c2:decimal(15,2)/DECIMAL_64, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:decimal(15,2)) + predicateExpression: SelectColumnIsNotNull(col 3:decimal(15,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,2)/DECIMAL_64) -> 3:decimal(15,2)) predicate: c1 is not null (type: boolean) Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -1430,8 +1423,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1439,9 +1431,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: c1:decimal(15,2), c2:decimal(15,2) + dataColumns: c1:decimal(15,2)/DECIMAL_64, c2:decimal(15,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(15,2)] Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -1571,7 +1563,7 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: 
COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(7,2), 1:c2:decimal(7,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(7,2)/DECIMAL_64, 1:c2:decimal(7,2)/DECIMAL_64, 2:ROW__ID:struct] Select Operator expressions: c1 (type: decimal(7,2)) outputColumnNames: c1 @@ -1583,7 +1575,7 @@ STAGE PLANS: Group By Operator aggregations: sum(c1) Group By Vectorization: - aggregators: VectorUDAFSumDecimal(col 0:decimal(7,2)) -> decimal(17,2) + aggregators: VectorUDAFSumDecimal64(col 0:decimal(7,2)/DECIMAL_64) -> decimal(17,2)/DECIMAL_64 className: VectorGroupByOperator groupByMode: HASH native: false @@ -1608,8 +1600,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1617,7 +1608,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0] - dataColumns: c1:decimal(7,2), c2:decimal(7,2) + dataColumns: c1:decimal(7,2)/DECIMAL_64, c2:decimal(7,2)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1789,7 +1780,7 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(7,2), 1:c2:decimal(7,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(7,2)/DECIMAL_64, 1:c2:decimal(7,2)/DECIMAL_64, 2:ROW__ID:struct] Select Operator expressions: c1 (type: decimal(7,2)), c2 (type: decimal(7,2)) outputColumnNames: c1, c2 @@ -1801,10 +1792,10 @@ STAGE PLANS: Group By Operator aggregations: sum(c1) Group By Vectorization: - aggregators: VectorUDAFSumDecimal(col 0:decimal(7,2)) -> decimal(17,2) + aggregators: VectorUDAFSumDecimal64(col 0:decimal(7,2)/DECIMAL_64) -> decimal(17,2)/DECIMAL_64 className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:decimal(7,2), col 1:decimal(7,2) + keyExpressions: ConvertDecimal64ToDecimal(col 0:decimal(7,2)/DECIMAL_64) -> 3:decimal(7,2), ConvertDecimal64ToDecimal(col 1:decimal(7,2)/DECIMAL_64) -> 4:decimal(7,2) native: false vectorProcessingMode: HASH projectedOutputColumnNums: [0] @@ -1830,8 +1821,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1839,9 +1829,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: c1:decimal(7,2), c2:decimal(7,2) + dataColumns: c1:decimal(7,2)/DECIMAL_64, c2:decimal(7,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(7,2), decimal(7,2)] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -2026,12 +2016,12 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(7,2), 1:c2:decimal(7,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(7,2)/DECIMAL_64, 1:c2:decimal(7,2)/DECIMAL_64, 
2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:decimal(7,2)) + predicateExpression: SelectColumnIsNotNull(col 3:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(7,2)/DECIMAL_64) -> 3:decimal(7,2)) predicate: c1 is not null (type: boolean) Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -2060,8 +2050,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2069,9 +2058,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: c1:decimal(7,2), c2:decimal(7,2) + dataColumns: c1:decimal(7,2)/DECIMAL_64, c2:decimal(7,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(7,2)] Map 5 Map Operator Tree: TableScan @@ -2079,12 +2068,12 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(7,2), 1:c2:decimal(7,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(7,2)/DECIMAL_64, 1:c2:decimal(7,2)/DECIMAL_64, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:decimal(7,2)) + predicateExpression: SelectColumnIsNotNull(col 3:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(7,2)/DECIMAL_64) -> 3:decimal(7,2)) predicate: c1 is not null (type: boolean) Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -2112,8 +2101,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2121,9 +2109,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0] - dataColumns: c1:decimal(7,2), c2:decimal(7,2) + dataColumns: c1:decimal(7,2)/DECIMAL_64, c2:decimal(7,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(7,2)] Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -2335,12 +2323,12 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(7,2), 1:c2:decimal(7,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(7,2)/DECIMAL_64, 1:c2:decimal(7,2)/DECIMAL_64, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:decimal(7,2)) + predicateExpression: SelectColumnIsNotNull(col 3:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(7,2)/DECIMAL_64) -> 3:decimal(7,2)) predicate: c1 is not null (type: boolean) Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: 
COMPLETE Select Operator @@ -2368,8 +2356,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2377,9 +2364,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0] - dataColumns: c1:decimal(7,2), c2:decimal(7,2) + dataColumns: c1:decimal(7,2)/DECIMAL_64, c2:decimal(7,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(7,2)] Map 5 Map Operator Tree: TableScan @@ -2387,12 +2374,12 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(7,2), 1:c2:decimal(7,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(7,2)/DECIMAL_64, 1:c2:decimal(7,2)/DECIMAL_64, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:decimal(7,2)) + predicateExpression: SelectColumnIsNotNull(col 3:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(7,2)/DECIMAL_64) -> 3:decimal(7,2)) predicate: c1 is not null (type: boolean) Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -2421,8 +2408,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2430,9 +2416,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: c1:decimal(7,2), c2:decimal(7,2) + dataColumns: c1:decimal(7,2)/DECIMAL_64, c2:decimal(7,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(7,2)] Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -2643,12 +2629,12 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(7,2), 1:c2:decimal(7,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(7,2)/DECIMAL_64, 1:c2:decimal(7,2)/DECIMAL_64, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:decimal(7,2)) + predicateExpression: SelectColumnIsNotNull(col 3:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(7,2)/DECIMAL_64) -> 3:decimal(7,2)) predicate: c1 is not null (type: boolean) Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -2677,8 +2663,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false 
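The pattern in the hunks above repeats throughout these golden files: decimal columns whose precision fits in 64 bits are now annotated decimal(p,s)/DECIMAL_64, sums switch from VectorUDAFSumDecimal to VectorUDAFSumDecimal64, and operators without a decimal64-aware implementation get a ConvertDecimal64ToDecimal child expression that widens the value into a scratch decimal column (hence the new scratchColumnTypeNames entries). As a rough, self-contained sketch of what that widening amounts to -- using java.math.BigDecimal in place of Hive's column-vector classes, so purely illustrative:

    import java.math.BigDecimal;

    public class Decimal64Sketch {
        // A decimal(7,2) value such as 12345.67 travels through the batch as
        // the scaled long 1234567; widening it just reattaches the scale.
        static BigDecimal fromDecimal64(long scaledValue, int scale) {
            return BigDecimal.valueOf(scaledValue, scale);
        }

        public static void main(String[] args) {
            long[] decimal64Column = {1234567L, -50L}; // column of scaled longs
            BigDecimal[] scratch = new BigDecimal[decimal64Column.length];
            for (int i = 0; i < decimal64Column.length; i++) {
                scratch[i] = fromDecimal64(decimal64Column[i], 2);
            }
            System.out.println(scratch[0]); // 12345.67
            System.out.println(scratch[1]); // -0.50
        }
    }

The long-only representation is also consistent with the decimal64 aggregator reporting a result type of decimal(17,2)/DECIMAL_64 above: the running sum can itself stay a scaled long for as long as the accumulated precision remains within 64-bit range.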
@@ -2686,9 +2671,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: c1:decimal(7,2), c2:decimal(7,2) + dataColumns: c1:decimal(7,2)/DECIMAL_64, c2:decimal(7,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(7,2)] Map 4 Map Operator Tree: TableScan @@ -2696,12 +2681,12 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(7,2), 1:c2:decimal(7,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(7,2)/DECIMAL_64, 1:c2:decimal(7,2)/DECIMAL_64, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:decimal(7,2)) + predicateExpression: SelectColumnIsNotNull(col 3:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(7,2)/DECIMAL_64) -> 3:decimal(7,2)) predicate: c1 is not null (type: boolean) Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -2730,8 +2715,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2739,9 +2723,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: c1:decimal(7,2), c2:decimal(7,2) + dataColumns: c1:decimal(7,2)/DECIMAL_64, c2:decimal(7,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(7,2)] Reducer 2 Execution mode: llap Reduce Operator Tree: diff --git a/ql/src/test/results/clientpositive/llap/vector_ptf_1.q.out b/ql/src/test/results/clientpositive/llap/vector_ptf_1.q.out index 568549d..ef4934e 100644 --- a/ql/src/test/results/clientpositive/llap/vector_ptf_1.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_ptf_1.q.out @@ -111,8 +111,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_reduce_groupby_duplicate_cols.q.out b/ql/src/test/results/clientpositive/llap/vector_reduce_groupby_duplicate_cols.q.out index 1ed694d..bda96da 100644 --- a/ql/src/test/results/clientpositive/llap/vector_reduce_groupby_duplicate_cols.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_reduce_groupby_duplicate_cols.q.out @@ -128,8 +128,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_retry_failure.q.out 
b/ql/src/test/results/clientpositive/llap/vector_retry_failure.q.out index 64e158e..c2342b2 100644 --- a/ql/src/test/results/clientpositive/llap/vector_retry_failure.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_retry_failure.q.out @@ -80,8 +80,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing.q.out index 428ee8d..a5d6167 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing.q.out @@ -56,8 +56,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -259,8 +258,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -467,8 +465,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -646,8 +643,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -841,8 +837,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1043,8 +1038,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1251,8 +1245,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS 
true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1296,8 +1289,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1537,8 +1529,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1697,8 +1688,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1874,8 +1864,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2053,8 +2042,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2244,8 +2232,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2411,8 +2398,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2578,8 +2564,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat 
allNative: true usesVectorUDFAdaptor: false @@ -2798,8 +2783,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3090,8 +3074,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3369,8 +3352,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3622,8 +3604,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3836,8 +3817,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4061,8 +4041,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -4258,8 +4237,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -4526,8 +4504,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4808,8 +4785,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: 
[DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -5323,8 +5299,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -6080,8 +6055,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6263,8 +6237,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -6428,8 +6401,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -6585,8 +6557,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -6748,8 +6719,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -6921,8 +6891,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -7088,8 +7057,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -7265,8 +7233,7 @@ STAGE PLANS: 
enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -7446,8 +7413,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -7626,8 +7592,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -7824,8 +7789,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -8020,8 +7984,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -8244,8 +8207,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -8485,8 +8447,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -8674,8 +8635,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -8829,8 +8789,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + 
featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -9017,8 +8976,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -9172,8 +9130,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -9331,8 +9288,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -9564,8 +9520,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -9705,8 +9660,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -9854,8 +9808,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_expressions.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_expressions.q.out index 148f82b..2bb7730 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_expressions.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_expressions.q.out @@ -102,8 +102,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -296,8 +295,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - 
vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -438,7 +436,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: t (type: tinyint), bo (type: boolean), s (type: string), si (type: smallint), f (type: float) sort order: ++++- @@ -457,8 +455,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -466,7 +463,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [0, 1, 4, 6, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -662,7 +659,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: si (type: smallint), i (type: int), s (type: string) sort order: +++ @@ -681,8 +678,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -690,7 +686,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 2, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -886,7 +882,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 204 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 
1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: b (type: bigint), si (type: smallint), s (type: string), d (type: double) sort order: ++++ @@ -905,8 +901,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -914,7 +909,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 3, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1110,7 +1105,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: f (type: float), b (type: bigint) sort order: ++ @@ -1130,8 +1125,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1139,7 +1133,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [3, 4, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1354,8 +1348,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1533,8 +1526,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - 
featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1680,7 +1672,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 228 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: ts (type: timestamp), i (type: int) sort order: ++ @@ -1700,8 +1692,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1709,7 +1700,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 7, 8] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1944,8 +1935,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_gby.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_gby.q.out index d87e96f..993ea61 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_gby.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_gby.q.out @@ -70,8 +70,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -122,8 +121,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out index 8dcb900..493d404 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out @@ -74,8 
+74,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -317,8 +316,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -554,8 +552,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -913,8 +910,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -965,8 +961,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_multipartitioning.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_multipartitioning.q.out index 01bcb69..1a06f08 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_multipartitioning.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_multipartitioning.q.out @@ -74,7 +74,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: s (type: string), si (type: smallint) sort order: ++ @@ -94,8 +94,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -103,7 +102,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 
3, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -10242,7 +10241,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 344 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -10269,8 +10268,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -10278,7 +10276,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [3, 7, 8, 9] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -10520,7 +10518,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -10546,8 +10544,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -10555,7 +10552,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 2, 4, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -10787,7 +10784,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: 
[0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -10814,8 +10811,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -10823,7 +10819,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 6, 7, 10] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -11060,7 +11056,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -11086,8 +11082,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -11095,7 +11090,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 4, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -11340,7 +11335,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 304 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -11367,8 +11362,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: 
hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -11376,7 +11370,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 4, 7, 9] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_navfn.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_navfn.q.out index 5466297..42e9694 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_navfn.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_navfn.q.out @@ -100,8 +100,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -226,7 +225,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 304 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: d (type: double), dec (type: decimal(4,2)) sort order: ++ @@ -246,8 +245,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -255,7 +253,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [5, 7, 9] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -485,7 +483,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 340 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 
9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: bin (type: binary), d (type: double), i (type: int) sort order: ++- @@ -505,8 +503,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -514,7 +511,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 5, 7, 10] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -710,7 +707,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 300 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: i (type: int), s (type: string), dec (type: decimal(4,2)) sort order: +++ @@ -729,8 +726,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -738,7 +734,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 7, 9] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -934,7 +930,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: d (type: double), f (type: float) sort order: ++ @@ -954,8 +950,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: 
org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -963,7 +958,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [0, 4, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1193,7 +1188,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: bo (type: boolean), s (type: string) sort order: ++ @@ -1212,8 +1207,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1221,7 +1215,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [6, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1418,7 +1412,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -1446,8 +1440,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1455,7 +1448,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [0, 2, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [bigint, bigint] 
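Note on the hunks above: the dec:decimal(4,2)/DECIMAL_64 annotations and the switch from "vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]" to "featureSupportInUse: [DECIMAL_64]" are two views of the same change — low-precision decimal columns now travel through LLAP as scaled 64-bit longs instead of full decimal objects. A minimal standalone Java sketch of that representation follows; it is an illustration only, not Hive's Decimal64ColumnVector code, and the class name and printing are simplified for the example.

    // Illustration only: a decimal(4,2) column held as scaled longs, which is
    // what the DECIMAL_64 annotations in the plans above stand for. Not Hive's
    // Decimal64ColumnVector implementation; everything here is simplified.
    public class Decimal64Sketch {
      public static void main(String[] args) {
        final int scale = 2;                       // decimal(4,2): two fractional digits
        final long pow = (long) Math.pow(10, scale);
        long[] vector = new long[3];               // one scaled long per row
        vector[0] = 1234;                          // represents 12.34
        vector[1] = -50;                           // represents -0.50
        vector[2] = 9999;                          // represents 99.99, the max for precision 4

        long sum = 0;                              // arithmetic stays in the cheap long domain
        for (long v : vector) {
          sum += v;
        }
        // 11183 at scale 2 -> 111.83 (sign handling elided for brevity)
        System.out.printf("sum = %d.%02d%n", sum / pow, Math.abs(sum % pow));
      }
    }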
Reducer 2 @@ -1639,8 +1632,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1812,8 +1804,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1985,8 +1976,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2158,8 +2148,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_order_null.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_order_null.q.out index f950c4c..91b52e7 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_order_null.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_order_null.q.out @@ -82,7 +82,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: i (type: int), s (type: string), b (type: bigint) sort order: +++ @@ -101,8 +101,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -110,7 +109,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 3, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -215,7 
+214,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: d (type: double), s (type: string), f (type: float) sort order: ++- @@ -234,8 +233,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -243,7 +241,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [4, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -348,7 +346,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 228 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: ts (type: timestamp), f (type: float) sort order: ++ @@ -368,8 +366,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -377,7 +374,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [4, 7, 8] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -482,7 +479,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 
9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: t (type: tinyint), s (type: string), d (type: double) sort order: ++- @@ -501,8 +498,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -510,7 +506,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [0, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -615,7 +611,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 228 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: ts (type: timestamp), s (type: string) sort order: ++ @@ -635,8 +631,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -644,7 +639,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 7, 8] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -785,7 +780,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: s (type: string), i (type: int) sort order: +- @@ -805,8 +800,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: 
org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -814,7 +808,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -950,7 +944,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: s (type: string), i (type: int) sort order: +- @@ -970,8 +964,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -979,7 +972,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1115,7 +1108,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: s (type: string), i (type: int) sort order: ++ @@ -1135,8 +1128,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1144,7 +1136,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] 
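The vector_windowing_order_null hunks above differ from the navfn ones only in the column type — dec is decimal(10,0) here rather than decimal(4,2) — and both pick up the /DECIMAL_64 marker. The common eligibility rule is precision: a signed 64-bit long always holds 18 full decimal digits, so any decimal type with precision at most 18 can be stored as a scaled long. A hedged sketch of that check (the method name is invented for illustration; Hive's actual test lives in its vectorization code):

    // Why decimal(4,2) and decimal(10,0) both qualify for DECIMAL_64:
    // Long.MAX_VALUE is 9223372036854775807 (19 digits), so any value with at
    // most 18 significant decimal digits fits in a long regardless of scale.
    static boolean fitsInDecimal64(int precision) {
      return precision <= 18;
    }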
Reducer 2 diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_range_multiorder.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_range_multiorder.q.out index 3a1b9c5e..782bd9b 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_range_multiorder.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_range_multiorder.q.out @@ -74,7 +74,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: si (type: smallint), i (type: int), b (type: bigint) sort order: +++ @@ -94,8 +94,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -103,7 +102,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [0, 1, 2, 3] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -333,7 +332,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: si (type: smallint), bo (type: boolean), i (type: int), f (type: float) sort order: +++- @@ -352,8 +351,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -361,7 +359,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 2, 4, 6] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -556,7 +554,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 16 Basic stats: 
COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: si (type: smallint), bo (type: boolean), i (type: int), f (type: float) sort order: +++- @@ -575,8 +573,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -584,7 +581,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 2, 4, 6] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -779,7 +776,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: s (type: string) sort order: + @@ -798,8 +795,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -807,7 +803,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 2, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -10929,7 +10925,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key 
expressions: s (type: string), si (type: smallint), i (type: int) sort order: +++ @@ -10948,8 +10944,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -10957,7 +10952,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 2, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -11187,7 +11182,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: s (type: string), si (type: smallint), i (type: int) sort order: +++ @@ -11206,8 +11201,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -11215,7 +11209,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 2, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -11445,7 +11439,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: s (type: string), si (type: smallint), i (type: int) sort order: ++- @@ -11464,8 +11458,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true 
usesVectorUDFAdaptor: false @@ -11473,7 +11466,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 2, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -11703,7 +11696,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: si (type: smallint), bo (type: boolean), i (type: int), f (type: float) sort order: +++- @@ -11722,8 +11715,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -11731,7 +11723,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 2, 4, 6] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -11961,7 +11953,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: i (type: int), bo (type: boolean), b (type: bigint) sort order: +++ @@ -11980,8 +11972,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -11989,7 +11980,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 3, 6] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 
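One aside on the range_multiorder hunks: "featureSupportInUse: [DECIMAL_64]" only says the scan can deliver scaled longs; wherever a downstream consumer needs a full decimal value, the long is widened back using the column's scale. A standalone sketch of that widening, with java.math.BigDecimal standing in for Hive's decimal writables (an assumption for readability — the real code path uses Hive's own types, not BigDecimal):

    import java.math.BigDecimal;

    // Widening a decimal64 value back to a full decimal: reinterpret the
    // scaled long with the column's scale. BigDecimal is a stand-in here;
    // Hive uses its own decimal writable types for this step.
    public class Decimal64Widen {
      static BigDecimal widen(long scaledValue, int scale) {
        return BigDecimal.valueOf(scaledValue, scale);
      }

      public static void main(String[] args) {
        System.out.println(widen(1234, 2));  // prints 12.34
        System.out.println(widen(-50, 2));   // prints -0.50
      }
    }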
scratchColumnTypeNames: [] Reducer 2 @@ -12185,7 +12176,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: i (type: int), CAST( s AS CHAR(12)) (type: char(12)) sort order: ++ @@ -12206,8 +12197,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -12215,7 +12205,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [string] Reducer 2 @@ -12411,7 +12401,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: i (type: int), CAST( s AS varchar(12)) (type: varchar(12)) sort order: ++ @@ -12432,8 +12422,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -12441,7 +12430,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [string] Reducer 2 diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_rank.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_rank.q.out index 2bf3b07..ff7cf6c 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_rank.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_rank.q.out @@ -74,7 +74,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column 
stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: f (type: float), t (type: tinyint) sort order: ++ @@ -94,8 +94,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -103,7 +102,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [0, 4, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -334,7 +333,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 228 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: ts (type: timestamp), i (type: int), s (type: string) sort order: ++- @@ -353,8 +352,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -362,7 +360,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 7, 8] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -558,7 +556,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: bo (type: boolean), 
b (type: bigint), s (type: string) sort order: +++ @@ -577,8 +575,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -586,7 +583,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [3, 6, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -782,7 +779,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 300 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: dec (type: decimal(4,2)), f (type: float) sort order: ++ @@ -802,8 +799,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -811,7 +807,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [4, 7, 9] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1028,7 +1024,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -1062,8 +1058,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1071,7 +1066,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 
includeColumns: [3, 8, 9] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Map 4 @@ -1081,7 +1076,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -1114,8 +1109,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1123,7 +1117,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [3] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1329,7 +1323,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -1363,8 +1357,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1372,7 +1365,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [3, 8, 9] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Map 4 @@ -1382,7 +1375,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 
1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -1415,8 +1408,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1424,7 +1416,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [3] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1632,7 +1624,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -1666,8 +1658,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1675,7 +1666,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [0, 3, 8, 9] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Map 4 @@ -1685,7 +1676,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -1718,8 +1709,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true 
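Several vector_windowing_rank plans above sort directly on the decimal column (e.g. "key expressions: dec (type: decimal(4,2)), f (type: float)"). Because every value of a decimal64 column shares one scale, decimal ordering coincides with ordering of the scaled longs, so comparisons on such keys can stay in the long domain. A sketch of that property (an illustrative helper, not a Hive API):

    // For a fixed scale, decimal ordering equals ordering of the scaled longs,
    // so keys like "dec (type: decimal(4,2))" can be compared without
    // materializing decimals. Illustrative helper only, not a Hive API.
    static int compareDecimal64(long a, long b) {
      return Long.compare(a, b);
    }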
inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1727,7 +1717,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [3] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_streaming.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_streaming.q.out index e120391..55899ef 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_streaming.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_streaming.q.out @@ -95,8 +95,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -242,8 +241,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -425,7 +423,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -450,8 +448,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -459,7 +456,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [0, 4] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -853,8 +850,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - 
inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_windowspec.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_windowspec.q.out index e3d52d2..93b8655 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_windowspec.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_windowspec.q.out @@ -74,7 +74,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: i (type: int), s (type: string), b (type: bigint) sort order: +++ @@ -93,8 +93,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -102,7 +101,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 3, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -297,7 +296,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: d (type: double), s (type: string), f (type: float) sort order: +++ @@ -316,8 +315,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -325,7 +323,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [4, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, 
dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -520,7 +518,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 228 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: ts (type: timestamp), f (type: float) sort order: ++ @@ -540,8 +538,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -549,7 +546,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [4, 7, 8] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -744,7 +741,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 228 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: ts (type: timestamp), s (type: string), f (type: float) sort order: +++ @@ -763,8 +760,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -772,7 +768,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [4, 7, 8] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -967,7 +963,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 
1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: t (type: tinyint), s (type: string), d (type: double) sort order: ++- @@ -986,8 +982,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -995,7 +990,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [0, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1190,7 +1185,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 228 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: ts (type: timestamp), s (type: string) sort order: ++ @@ -1210,8 +1205,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1219,7 +1213,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 7, 8] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1449,7 +1443,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: ts (type: timestamp), f (type: float) sort order: ++ @@ -1468,8 +1462,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP 
is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1477,7 +1470,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [4, 8] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1707,7 +1700,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: ts (type: timestamp), f (type: float) sort order: ++ @@ -1726,8 +1719,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1735,7 +1727,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [4, 8] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1930,7 +1922,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: s (type: string), i (type: int) sort order: ++ @@ -1950,8 +1942,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1959,7 +1950,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, 
ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -2097,7 +2088,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: s (type: string), i (type: int) sort order: ++ @@ -2117,8 +2108,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2126,7 +2116,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -2264,7 +2254,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: s (type: string), i (type: int) sort order: ++ @@ -2284,8 +2274,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2293,7 +2282,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_windowspec4.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_windowspec4.q.out index 3cebb04..78df440 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_windowspec4.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_windowspec4.q.out @@ -92,8 +92,7 @@ STAGE PLANS: enabled: true 
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out b/ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out index a6a0371..cb44e85 100644 --- a/ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out +++ b/ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out @@ -3726,7 +3726,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesnullorc - Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true Select Operator @@ -3734,7 +3734,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [] - Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: count() Group By Vectorization: diff --git a/ql/src/test/results/clientpositive/llap/vectorized_bucketmapjoin1.q.out b/ql/src/test/results/clientpositive/llap/vectorized_bucketmapjoin1.q.out index 61c5051..dfe5279 100644 --- a/ql/src/test/results/clientpositive/llap/vectorized_bucketmapjoin1.q.out +++ b/ql/src/test/results/clientpositive/llap/vectorized_bucketmapjoin1.q.out @@ -145,8 +145,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -180,8 +180,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -280,8 +280,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -415,8 +415,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -451,8 +451,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] 
+ featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out b/ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out index d3ab509..8ee96d3 100644 --- a/ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out +++ b/ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out @@ -82,8 +82,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -261,8 +260,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -305,8 +303,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -419,8 +417,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -448,8 +445,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -578,8 +575,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -622,8 +618,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -666,8 +662,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + 
inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -804,8 +800,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -833,8 +828,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -862,8 +857,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1006,8 +1001,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1065,8 +1059,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1179,8 +1173,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1208,8 +1201,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1333,8 +1326,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1377,8 +1369,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - 
inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1491,8 +1483,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1520,8 +1511,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1643,8 +1634,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1687,8 +1677,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1801,8 +1791,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1845,8 +1834,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1959,8 +1948,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1988,8 +1976,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2102,8 +2090,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: 
hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2131,8 +2118,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2258,8 +2245,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2302,8 +2288,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2437,8 +2423,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2577,8 +2562,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2605,8 +2589,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2723,8 +2707,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2782,8 +2765,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ 
-2896,8 +2879,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2940,8 +2922,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3052,8 +3034,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3078,8 +3060,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3172,8 +3153,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3216,8 +3196,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3314,8 +3294,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3358,8 +3337,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3402,8 +3381,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat 
allNative: false usesVectorUDFAdaptor: false @@ -3537,8 +3516,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3594,8 +3573,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3722,8 +3701,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3752,8 +3730,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3782,8 +3759,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3982,8 +3958,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -4012,8 +3987,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4042,8 +4016,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4247,8 +4220,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - 
featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4277,8 +4249,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4305,8 +4276,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4335,8 +4305,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4597,8 +4566,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4641,8 +4609,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4773,8 +4741,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4817,8 +4784,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4861,8 +4828,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4983,8 +4950,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: 
[DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5042,8 +5008,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5161,8 +5127,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5205,8 +5170,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5313,8 +5278,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5357,8 +5321,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5465,8 +5429,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5509,8 +5472,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5625,8 +5588,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5779,8 +5741,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: 
hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5823,8 +5784,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5914,8 +5875,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5940,8 +5901,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -6015,8 +5975,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -6057,8 +6016,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6157,8 +6116,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6201,8 +6159,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6245,8 +6203,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6355,8 +6313,8 @@ 
STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -6425,8 +6383,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6533,8 +6491,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6563,8 +6520,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6593,8 +6549,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/orc_file_dump.q.out b/ql/src/test/results/clientpositive/orc_file_dump.q.out index 2067145..8ec71a9 100644 --- a/ql/src/test/results/clientpositive/orc_file_dump.q.out +++ b/ql/src/test/results/clientpositive/orc_file_dump.q.out @@ -102,34 +102,34 @@ Type: struct Stripe Statistics: Stripe 1: Column 0: count: 152 hasNull: false - Column 1: count: 152 hasNull: false min: 0 max: 497 sum: 38034 - Column 2: count: 152 hasNull: false min: val_0 max: val_97 sum: 1034 + Column 1: count: 152 hasNull: false bytesOnDisk: 309 min: 0 max: 497 sum: 38034 + Column 2: count: 152 hasNull: false bytesOnDisk: 679 min: val_0 max: val_97 sum: 1034 Stripe 2: Column 0: count: 90 hasNull: false - Column 1: count: 90 hasNull: false min: 0 max: 495 sum: 22736 - Column 2: count: 90 hasNull: false min: val_0 max: val_86 sum: 612 + Column 1: count: 90 hasNull: false bytesOnDisk: 185 min: 0 max: 495 sum: 22736 + Column 2: count: 90 hasNull: false bytesOnDisk: 428 min: val_0 max: val_86 sum: 612 File Statistics: Column 0: count: 242 hasNull: false - Column 1: count: 242 hasNull: false min: 0 max: 497 sum: 60770 - Column 2: count: 242 hasNull: false min: val_0 max: val_97 sum: 1646 + Column 1: count: 242 hasNull: false bytesOnDisk: 494 min: 0 max: 497 sum: 60770 + Column 2: count: 242 hasNull: false bytesOnDisk: 1107 min: val_0 max: val_97 sum: 1646 Stripes: Stripe: offset: 3 data: 988 rows: 152 tail: 72 index: 77 @@ -675,7 +675,7 @@ Stripes: Row group indices for column 2: Entry 0: count: 90 hasNull: false min: 
val_0 max: val_86 sum: 612 positions: 0,0,0,0,0 -File length: 2134 bytes +File length: 2155 bytes Padding length: 0 bytes Padding ratio: 0% ________________________________________________________________________________________________________________________ @@ -698,17 +698,17 @@ Type: struct Stripe Statistics: Stripe 1: Column 0: count: 152 hasNull: false - Column 1: count: 152 hasNull: false min: 0 max: 497 sum: 38034 - Column 2: count: 152 hasNull: false min: val_0 max: val_97 sum: 1034 + Column 1: count: 152 hasNull: false bytesOnDisk: 309 min: 0 max: 497 sum: 38034 + Column 2: count: 152 hasNull: false bytesOnDisk: 679 min: val_0 max: val_97 sum: 1034 Stripe 2: Column 0: count: 90 hasNull: false - Column 1: count: 90 hasNull: false min: 0 max: 495 sum: 22736 - Column 2: count: 90 hasNull: false min: val_0 max: val_86 sum: 612 + Column 1: count: 90 hasNull: false bytesOnDisk: 185 min: 0 max: 495 sum: 22736 + Column 2: count: 90 hasNull: false bytesOnDisk: 428 min: val_0 max: val_86 sum: 612 File Statistics: Column 0: count: 242 hasNull: false - Column 1: count: 242 hasNull: false min: 0 max: 497 sum: 60770 - Column 2: count: 242 hasNull: false min: val_0 max: val_97 sum: 1646 + Column 1: count: 242 hasNull: false bytesOnDisk: 494 min: 0 max: 497 sum: 60770 + Column 2: count: 242 hasNull: false bytesOnDisk: 1107 min: val_0 max: val_97 sum: 1646 Stripes: Stripe: offset: 3 data: 988 rows: 152 tail: 72 index: 77 @@ -745,7 +745,7 @@ Stripes: Row group indices for column 2: Entry 0: count: 90 hasNull: false min: val_0 max: val_86 sum: 612 positions: 0,0,0,0,0 -File length: 2134 bytes +File length: 2155 bytes Padding length: 0 bytes Padding ratio: 0% ________________________________________________________________________________________________________________________ diff --git a/ql/src/test/results/clientpositive/orc_merge11.q.out b/ql/src/test/results/clientpositive/orc_merge11.q.out index a4ec749..1b2ddd3 100644 --- a/ql/src/test/results/clientpositive/orc_merge11.q.out +++ b/ql/src/test/results/clientpositive/orc_merge11.q.out @@ -81,19 +81,19 @@ Type: struct 12:int - Statistics: Num rows: 1050 Data size: 311254 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1050 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(_col0) Group By Vectorization: diff --git a/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out b/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out index 0336982..8dbd679 100644 --- a/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out @@ -3697,7 +3697,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesnullorc - Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true Select Operator @@ -3705,7 +3705,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [] - Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: count() Group By Vectorization: @@ -3808,7 +3808,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesnullorc - Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: 
NONE + Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -3818,7 +3818,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0] - Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(ctinyint) Group By Vectorization: @@ -3921,7 +3921,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesnullorc - Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -3931,7 +3931,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [2] - Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(cint) Group By Vectorization: @@ -4034,7 +4034,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesnullorc - Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -4044,7 +4044,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [4] - Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(cfloat) Group By Vectorization: @@ -4147,7 +4147,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesnullorc - Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -4157,7 +4157,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [6] - Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(cstring1) Group By Vectorization: @@ -4260,7 +4260,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesnullorc - Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -4270,7 +4270,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [10] - Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(cboolean1) Group By Vectorization: diff --git a/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out b/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out index fe5fd23..18926cb 100644 --- a/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out @@ -258,7 +258,7 @@ 
STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc_string - Statistics: Num rows: 52 Data size: 3555 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -269,7 +269,7 @@ STAGE PLANS: native: true projectedOutputColumnNums: [5, 6, 7, 8, 9, 10, 11, 12, 13, 0, 1, 3, 14, 15, 16, 17, 18] selectExpressions: VectorUDFUnixTimeStampTimestamp(col 1:timestamp) -> 5:bigint, VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 6:int, VectorUDFMonthTimestamp(col 1:timestamp, field MONTH) -> 7:int, VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 8:int, VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 9:int, VectorUDFWeekOfYearTimestamp(col 1:timestamp, field WEEK_OF_YEAR) -> 10:int, VectorUDFHourTimestamp(col 1:timestamp, field HOUR_OF_DAY) -> 11:int, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 12:int, VectorUDFSecondTimestamp(col 1:timestamp, field SECOND) -> 13:int, IfExprTimestampColumnScalar(col 0:boolean, col 1:timestamp, val 1319-02-02 16:31:57.778) -> 14:timestamp, IfExprTimestampScalarColumn(col 0:boolean, val 2000-12-18 08:42:30.0005, col 1:timestamp) -> 15:timestamp, IfExprTimestampColumnColumn(col 0:boolean, col 1:timestampcol 3:timestamp) -> 16:timestamp, IfExprColumnNull(col 0:boolean, col 1:timestamp, null)(children: col 0:boolean, col 1:timestamp) -> 17:timestamp, IfExprNullColumn(col 0:boolean, null, col 3)(children: col 0:boolean, col 3:timestamp) -> 18:timestamp - Statistics: Num rows: 52 Data size: 3555 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: bigint) sort order: + @@ -277,7 +277,7 @@ STAGE PLANS: className: VectorReduceSinkObjectHashOperator native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - Statistics: Num rows: 52 Data size: 3555 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col4 (type: int), _col5 (type: int), _col6 (type: int), _col7 (type: int), _col8 (type: int), _col9 (type: boolean), _col10 (type: timestamp), _col11 (type: timestamp), _col12 (type: timestamp), _col13 (type: timestamp), _col14 (type: timestamp), _col15 (type: timestamp), _col16 (type: timestamp) Execution mode: vectorized Map Vectorization: @@ -305,13 +305,13 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] - Statistics: Num rows: 52 Data size: 3555 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 52 Data size: 3555 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -466,7 +466,7 @@ STAGE PLANS: 
Map Operator Tree: TableScan alias: alltypesorc_string - Statistics: Num rows: 52 Data size: 3555 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -477,7 +477,7 @@ STAGE PLANS: native: true projectedOutputColumnNums: [5, 6, 7, 8, 9, 10, 11, 12, 13] selectExpressions: VectorUDFUnixTimeStampString(col 2:string) -> 5:bigint, VectorUDFYearString(col 2:string, fieldStart 0, fieldLength 4) -> 6:int, VectorUDFMonthString(col 2:string, fieldStart 5, fieldLength 2) -> 7:int, VectorUDFDayOfMonthString(col 2:string, fieldStart 8, fieldLength 2) -> 8:int, VectorUDFDayOfMonthString(col 2:string, fieldStart 8, fieldLength 2) -> 9:int, VectorUDFWeekOfYearString(col 2:string) -> 10:int, VectorUDFHourString(col 2:string, fieldStart 11, fieldLength 2) -> 11:int, VectorUDFMinuteString(col 2:string, fieldStart 14, fieldLength 2) -> 12:int, VectorUDFSecondString(col 2:string, fieldStart 17, fieldLength 2) -> 13:int - Statistics: Num rows: 52 Data size: 3555 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: bigint) sort order: + @@ -485,7 +485,7 @@ STAGE PLANS: className: VectorReduceSinkObjectHashOperator native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - Statistics: Num rows: 52 Data size: 3555 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col4 (type: int), _col5 (type: int), _col6 (type: int), _col7 (type: int), _col8 (type: int) Execution mode: vectorized Map Vectorization: @@ -513,13 +513,13 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8] - Statistics: Num rows: 52 Data size: 3555 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 52 Data size: 3555 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -658,7 +658,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc_string - Statistics: Num rows: 52 Data size: 3555 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -669,7 +669,7 @@ STAGE PLANS: native: true projectedOutputColumnNums: [7, 8, 9, 10, 11, 12, 13, 14, 15] selectExpressions: LongColEqualLongColumn(col 5:bigint, col 6:bigint)(children: VectorUDFUnixTimeStampTimestamp(col 1:timestamp) -> 5:bigint, VectorUDFUnixTimeStampString(col 2:string) -> 6:bigint) -> 7:boolean, LongColEqualLongColumn(col 5:int, col 6:int)(children: VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 5:int, VectorUDFYearString(col 2:string, fieldStart 0, fieldLength 4) -> 
6:int) -> 8:boolean, LongColEqualLongColumn(col 5:int, col 6:int)(children: VectorUDFMonthTimestamp(col 1:timestamp, field MONTH) -> 5:int, VectorUDFMonthString(col 2:string, fieldStart 5, fieldLength 2) -> 6:int) -> 9:boolean, LongColEqualLongColumn(col 5:int, col 6:int)(children: VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 5:int, VectorUDFDayOfMonthString(col 2:string, fieldStart 8, fieldLength 2) -> 6:int) -> 10:boolean, LongColEqualLongColumn(col 5:int, col 6:int)(children: VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 5:int, VectorUDFDayOfMonthString(col 2:string, fieldStart 8, fieldLength 2) -> 6:int) -> 11:boolean, LongColEqualLongColumn(col 5:int, col 6:int)(children: VectorUDFWeekOfYearTimestamp(col 1:timestamp, field WEEK_OF_YEAR) -> 5:int, VectorUDFWeekOfYearString(col 2:string) -> 6:int) -> 12:boolean, LongColEqualLongColumn(col 5:int, col 6:int)(children: VectorUDFHourTimestamp(col 1:timestamp, field HOUR_OF_DAY) -> 5:int, VectorUDFHourString(col 2:string, fieldStart 11, fieldLength 2) -> 6:int) -> 13:boolean, LongColEqualLongColumn(col 5:int, col 6:int)(children: VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 5:int, VectorUDFMinuteString(col 2:string, fieldStart 14, fieldLength 2) -> 6:int) -> 14:boolean, LongColEqualLongColumn(col 5:int, col 6:int)(children: VectorUDFSecondTimestamp(col 1:timestamp, field SECOND) -> 5:int, VectorUDFSecondString(col 2:string, fieldStart 17, fieldLength 2) -> 6:int) -> 15:boolean - Statistics: Num rows: 52 Data size: 3555 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: boolean) sort order: + @@ -677,7 +677,7 @@ STAGE PLANS: className: VectorReduceSinkObjectHashOperator native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - Statistics: Num rows: 52 Data size: 3555 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: boolean), _col2 (type: boolean), _col3 (type: boolean), _col4 (type: boolean), _col5 (type: boolean), _col6 (type: boolean), _col7 (type: boolean), _col8 (type: boolean) Execution mode: vectorized Map Vectorization: @@ -705,13 +705,13 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8] - Statistics: Num rows: 52 Data size: 3555 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 52 Data size: 3555 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -981,7 +981,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc_string - Statistics: Num rows: 52 Data size: 3555 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator 
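
Aside for reviewers of these golden-file updates: the inputFormatFeatureSupport/featureSupportInUse lines earlier in these plan diffs now report [DECIMAL_64] because the vectorized ORC readers may hand out Decimal64ColumnVector (a long-backed vector for decimals of precision <= 18) instead of DecimalColumnVector. The sketch below is illustrative only and not part of this patch: it shows the branch a batch consumer has to handle once the feature is in use. Decimal64ColumnVector, DecimalColumnVector, and HiveDecimalWritable.serialize64(scale) are existing storage-api classes/methods; the driver method itself is hypothetical and ignores long overflow for brevity.

import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

public class Decimal64BatchSketch {
  /** Sums a decimal column as an unscaled long at the vector's own scale. */
  static long sumUnscaled(VectorizedRowBatch batch, int colNum) {
    ColumnVector cv = batch.cols[colNum];
    long sum = 0;
    if (cv instanceof Decimal64ColumnVector) {
      // Fast path: values are already unscaled longs, no per-row object churn.
      Decimal64ColumnVector d64 = (Decimal64ColumnVector) cv;
      for (int i = 0; i < batch.size; i++) {
        int row = d64.isRepeating ? 0 : i;
        if (d64.noNulls || !d64.isNull[row]) {
          sum += d64.vector[row];
        }
      }
    } else {
      // Fallback: HiveDecimalWritable-backed vector, convert each value.
      DecimalColumnVector dec = (DecimalColumnVector) cv;
      for (int i = 0; i < batch.size; i++) {
        int row = dec.isRepeating ? 0 : i;
        if (dec.noNulls || !dec.isNull[row]) {
          sum += dec.vector[row].serialize64(dec.scale);
        }
      }
    }
    return sum;
  }
}

The remaining churn in these .q.out files (bytesOnDisk in column statistics, "min UTC"/"max UTC" on timestamp columns, shifted Data size estimates and file lengths) is expected fallout of the same reader/writer changes, not behavioral regressions.
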
@@ -991,7 +991,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [1] - Statistics: Num rows: 52 Data size: 3555 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: min(ctimestamp1), max(ctimestamp1), count(ctimestamp1), count() Group By Vectorization: @@ -1106,7 +1106,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc_string - Statistics: Num rows: 52 Data size: 3555 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -1116,7 +1116,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [1] - Statistics: Num rows: 52 Data size: 3555 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(ctimestamp1) Group By Vectorization: @@ -1248,7 +1248,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc_string - Statistics: Num rows: 52 Data size: 3555 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -1259,7 +1259,7 @@ STAGE PLANS: native: true projectedOutputColumnNums: [1, 5, 8] selectExpressions: CastTimestampToDouble(col 1:timestamp) -> 5:double, DoubleColMultiplyDoubleColumn(col 6:double, col 7:double)(children: CastTimestampToDouble(col 1:timestamp) -> 6:double, CastTimestampToDouble(col 1:timestamp) -> 7:double) -> 8:double - Statistics: Num rows: 52 Data size: 3555 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(_col0), count(_col0), sum(_col2), sum(_col1) Group By Vectorization: diff --git a/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out b/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out index a6f12af..01ec132 100644 --- a/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out +++ b/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out @@ -370,10 +370,10 @@ POSTHOOK: Lineage: over10k_orc_bucketed_n0.si SIMPLE [(over10k_n9)over10k_n9.Fie POSTHOOK: Lineage: over10k_orc_bucketed_n0.t SIMPLE [(over10k_n9)over10k_n9.FieldSchema(name:t, type:tinyint, comment:null), ] POSTHOOK: Lineage: over10k_orc_bucketed_n0.ts SIMPLE [(over10k_n9)over10k_n9.FieldSchema(name:ts, type:timestamp, comment:null), ] Found 4 items --rw-rw-rw- 3 ### USER ### ### GROUP ### 8942 ### HDFS DATE ### hdfs://### HDFS PATH ### --rw-rw-rw- 3 ### USER ### ### GROUP ### 7710 ### HDFS DATE ### hdfs://### HDFS PATH ### --rw-rw-rw- 3 ### USER ### ### GROUP ### 7297 ### HDFS DATE ### hdfs://### HDFS PATH ### --rw-rw-rw- 3 ### USER ### ### GROUP ### 7204 ### HDFS DATE ### hdfs://### HDFS PATH ### +-rw-rw-rw- 3 ### USER ### ### GROUP ### 8997 ### HDFS DATE ### hdfs://### HDFS PATH ### +-rw-rw-rw- 3 ### USER ### ### GROUP ### 7773 ### HDFS DATE ### hdfs://### HDFS PATH ### +-rw-rw-rw- 3 ### USER ### ### GROUP ### 7358 ### HDFS DATE ### hdfs://### HDFS PATH ### +-rw-rw-rw- 3 ### USER ### ### GROUP ### 7261 ### HDFS DATE ### hdfs://### HDFS PATH ### PREHOOK: query: insert into over10k_orc_bucketed_n0 select * from over10k_n9 PREHOOK: type: QUERY PREHOOK: Input: 
default@over10k_n9 @@ -394,14 +394,14 @@ POSTHOOK: Lineage: over10k_orc_bucketed_n0.si SIMPLE [(over10k_n9)over10k_n9.Fie POSTHOOK: Lineage: over10k_orc_bucketed_n0.t SIMPLE [(over10k_n9)over10k_n9.FieldSchema(name:t, type:tinyint, comment:null), ] POSTHOOK: Lineage: over10k_orc_bucketed_n0.ts SIMPLE [(over10k_n9)over10k_n9.FieldSchema(name:ts, type:timestamp, comment:null), ] Found 8 items --rw-rw-rw- 3 ### USER ### ### GROUP ### 8942 ### HDFS DATE ### hdfs://### HDFS PATH ### --rw-rw-rw- 3 ### USER ### ### GROUP ### 8942 ### HDFS DATE ### hdfs://### HDFS PATH ### --rw-rw-rw- 3 ### USER ### ### GROUP ### 7710 ### HDFS DATE ### hdfs://### HDFS PATH ### --rw-rw-rw- 3 ### USER ### ### GROUP ### 7710 ### HDFS DATE ### hdfs://### HDFS PATH ### --rw-rw-rw- 3 ### USER ### ### GROUP ### 7297 ### HDFS DATE ### hdfs://### HDFS PATH ### --rw-rw-rw- 3 ### USER ### ### GROUP ### 7297 ### HDFS DATE ### hdfs://### HDFS PATH ### --rw-rw-rw- 3 ### USER ### ### GROUP ### 7204 ### HDFS DATE ### hdfs://### HDFS PATH ### --rw-rw-rw- 3 ### USER ### ### GROUP ### 7204 ### HDFS DATE ### hdfs://### HDFS PATH ### +-rw-rw-rw- 3 ### USER ### ### GROUP ### 8997 ### HDFS DATE ### hdfs://### HDFS PATH ### +-rw-rw-rw- 3 ### USER ### ### GROUP ### 8997 ### HDFS DATE ### hdfs://### HDFS PATH ### +-rw-rw-rw- 3 ### USER ### ### GROUP ### 7773 ### HDFS DATE ### hdfs://### HDFS PATH ### +-rw-rw-rw- 3 ### USER ### ### GROUP ### 7773 ### HDFS DATE ### hdfs://### HDFS PATH ### +-rw-rw-rw- 3 ### USER ### ### GROUP ### 7358 ### HDFS DATE ### hdfs://### HDFS PATH ### +-rw-rw-rw- 3 ### USER ### ### GROUP ### 7358 ### HDFS DATE ### hdfs://### HDFS PATH ### +-rw-rw-rw- 3 ### USER ### ### GROUP ### 7261 ### HDFS DATE ### hdfs://### HDFS PATH ### +-rw-rw-rw- 3 ### USER ### ### GROUP ### 7261 ### HDFS DATE ### hdfs://### HDFS PATH ### PREHOOK: query: select distinct 7 as seven, INPUT__FILE__NAME from over10k_orc_bucketed_n0 PREHOOK: type: QUERY PREHOOK: Input: default@over10k_orc_bucketed_n0 @@ -680,22 +680,22 @@ STAGE PLANS: Map Operator Tree: TableScan alias: over10k_orc_bucketed_n0 - Statistics: Num rows: 1229 Data size: 703430 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1241 Data size: 710230 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: ROW__ID (type: struct) outputColumnNames: ROW__ID - Statistics: Num rows: 1229 Data size: 703430 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1241 Data size: 710230 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: count() keys: ROW__ID (type: struct) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 614 Data size: 51576 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 620 Data size: 52080 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: struct) sort order: + Map-reduce partition columns: _col0 (type: struct) - Statistics: Num rows: 614 Data size: 51576 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 620 Data size: 52080 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: bigint) Reducer 2 Reduce Operator Tree: @@ -704,13 +704,13 @@ STAGE PLANS: keys: KEY._col0 (type: struct) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 614 Data size: 51576 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 620 Data size: 52080 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (_col1 > 1L) (type: boolean) - 
Statistics: Num rows: 204 Data size: 17136 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 206 Data size: 17304 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 204 Data size: 17136 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 206 Data size: 17304 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/tez/explainanalyze_5.q.out b/ql/src/test/results/clientpositive/tez/explainanalyze_5.q.out index ce8ab92..fd71c0c 100644 --- a/ql/src/test/results/clientpositive/tez/explainanalyze_5.q.out +++ b/ql/src/test/results/clientpositive/tez/explainanalyze_5.q.out @@ -304,7 +304,7 @@ Stage-3 Output:["_col0","_col1","_col3"] Filter Operator [FIL_9] (rows=10/2 width=316) predicate:((de = 109.23) or (de = 119.23)) - TableScan [TS_0] (rows=83/4 width=316) + TableScan [TS_0] (rows=86/4 width=316) default@acid_uami_n2,acid_uami_n2, ACID table,Tbl:COMPLETE,Col:NONE,Output:["i","de","vc"] PREHOOK: query: select * from acid_uami_n2 order by de diff --git a/ql/src/test/results/clientpositive/tez/orc_merge12.q.out b/ql/src/test/results/clientpositive/tez/orc_merge12.q.out index 673bad2..d29d704 100644 --- a/ql/src/test/results/clientpositive/tez/orc_merge12.q.out +++ b/ql/src/test/results/clientpositive/tez/orc_merge12.q.out @@ -161,8 +161,8 @@ Stripe Statistics: Column 6: count: 9174 hasNull: true min: -16379.0 max: 9763215.5639 sum: 5.62236530305E7 Column 7: count: 12288 hasNull: false min: 00020767-dd8f-4f4d-bd68-4b7be64b8e44 max: fffa3516-e219-4027-b0d3-72bb2e676c52 sum: 442368 Column 8: count: 12288 hasNull: false min: 000976f7-7075-4f3f-a564-5a375fafcc101416a2b7-7f64-41b7-851f-97d15405037e max: fffd0642-5f01-48cd-8d97-3428faee49e9b39f2b4c-efdc-4e5f-9ab5-4aa5394cb156 sum: 884736 - Column 9: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 - Column 10: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 + Column 9: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 + Column 10: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 Column 11: count: 9174 hasNull: true true: 6138 Column 12: count: 9173 hasNull: true true: 3983 Column 13: count: 9173 hasNull: true min: -64 max: 62 sum: -39856 @@ -173,8 +173,8 @@ Stripe Statistics: Column 18: count: 9174 hasNull: true min: -16379.0 max: 9763215.5639 sum: 5.62236530305E7 Column 19: count: 9174 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yy2GiGM sum: 127881 Column 20: count: 9173 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 149134 - Column 21: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 - Column 22: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 + Column 21: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 + Column 22: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 Column 23: count: 9174 hasNull: true true: 6138 Column 24: count: 9173 
hasNull: true true: 3983 Column 25: count: 9173 hasNull: true min: -64 max: 62 sum: -39856 @@ -185,8 +185,8 @@ Stripe Statistics: Column 30: count: 9174 hasNull: true min: -16379.0 max: 9763215.5639 sum: 5.62236530305E7 Column 31: count: 9174 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yy2GiGM sum: 127881 Column 32: count: 9173 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 149134 - Column 33: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 - Column 34: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 + Column 33: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 + Column 34: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 Column 35: count: 9174 hasNull: true true: 6138 Column 36: count: 9173 hasNull: true true: 3983 Stripe 2: @@ -199,8 +199,8 @@ Stripe Statistics: Column 6: count: 9174 hasNull: true min: -16379.0 max: 9763215.5639 sum: 5.62236530305E7 Column 7: count: 12288 hasNull: false min: 00020767-dd8f-4f4d-bd68-4b7be64b8e44 max: fffa3516-e219-4027-b0d3-72bb2e676c52 sum: 442368 Column 8: count: 12288 hasNull: false min: 000976f7-7075-4f3f-a564-5a375fafcc101416a2b7-7f64-41b7-851f-97d15405037e max: fffd0642-5f01-48cd-8d97-3428faee49e9b39f2b4c-efdc-4e5f-9ab5-4aa5394cb156 sum: 884736 - Column 9: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 - Column 10: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 + Column 9: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 + Column 10: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 Column 11: count: 9174 hasNull: true true: 6138 Column 12: count: 9173 hasNull: true true: 3983 Column 13: count: 9173 hasNull: true min: -64 max: 62 sum: -39856 @@ -211,8 +211,8 @@ Stripe Statistics: Column 18: count: 9174 hasNull: true min: -16379.0 max: 9763215.5639 sum: 5.62236530305E7 Column 19: count: 9174 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yy2GiGM sum: 127881 Column 20: count: 9173 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 149134 - Column 21: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 - Column 22: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 + Column 21: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 + Column 22: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 Column 23: count: 9174 hasNull: true true: 6138 Column 24: count: 9173 hasNull: true true: 3983 Column 25: count: 9173 hasNull: true min: -64 max: 62 sum: -39856 @@ -223,8 +223,8 @@ Stripe Statistics: Column 30: count: 9174 hasNull: true min: -16379.0 max: 9763215.5639 sum: 5.62236530305E7 Column 31: count: 9174 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yy2GiGM sum: 127881 Column 32: count: 9173 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 149134 - Column 33: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 - 
Column 34: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 + Column 33: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 + Column 34: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 Column 35: count: 9174 hasNull: true true: 6138 Column 36: count: 9173 hasNull: true true: 3983 @@ -238,8 +238,8 @@ File Statistics: Column 6: count: 18348 hasNull: true min: -16379.0 max: 9763215.5639 sum: 1.12447306061E8 Column 7: count: 24576 hasNull: false min: 00020767-dd8f-4f4d-bd68-4b7be64b8e44 max: fffa3516-e219-4027-b0d3-72bb2e676c52 sum: 884736 Column 8: count: 24576 hasNull: false min: 000976f7-7075-4f3f-a564-5a375fafcc101416a2b7-7f64-41b7-851f-97d15405037e max: fffd0642-5f01-48cd-8d97-3428faee49e9b39f2b4c-efdc-4e5f-9ab5-4aa5394cb156 sum: 1769472 - Column 9: count: 18346 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 - Column 10: count: 18348 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 + Column 9: count: 18346 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 + Column 10: count: 18348 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 Column 11: count: 18348 hasNull: true true: 12276 Column 12: count: 18346 hasNull: true true: 7966 Column 13: count: 18346 hasNull: true min: -64 max: 62 sum: -79712 @@ -250,8 +250,8 @@ File Statistics: Column 18: count: 18348 hasNull: true min: -16379.0 max: 9763215.5639 sum: 1.12447306061E8 Column 19: count: 18348 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yy2GiGM sum: 255762 Column 20: count: 18346 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 298268 - Column 21: count: 18346 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 - Column 22: count: 18348 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 + Column 21: count: 18346 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 + Column 22: count: 18348 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 Column 23: count: 18348 hasNull: true true: 12276 Column 24: count: 18346 hasNull: true true: 7966 Column 25: count: 18346 hasNull: true min: -64 max: 62 sum: -79712 @@ -262,8 +262,8 @@ File Statistics: Column 30: count: 18348 hasNull: true min: -16379.0 max: 9763215.5639 sum: 1.12447306061E8 Column 31: count: 18348 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yy2GiGM sum: 255762 Column 32: count: 18346 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 298268 - Column 33: count: 18346 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 - Column 34: count: 18348 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 + Column 33: count: 18346 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 + Column 34: count: 18348 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 Column 35: count: 18348 hasNull: true true: 12276 Column 36: 
count: 18346 hasNull: true true: 7966 @@ -457,11 +457,11 @@ Stripes: Entry 0: count: 10000 hasNull: false min: 000976f7-7075-4f3f-a564-5a375fafcc101416a2b7-7f64-41b7-851f-97d15405037e max: fffd0642-5f01-48cd-8d97-3428faee49e9b39f2b4c-efdc-4e5f-9ab5-4aa5394cb156 sum: 720000 positions: 0,0,0,0,0 Entry 1: count: 2288 hasNull: false min: 00124556-8383-44c4-a28b-7a413de74ccc4137606f-2cf7-43fb-beff-b6d374fd15ec max: ffde3bce-bb56-4fa9-81d7-146ca2eab946225c18e0-0002-4d07-9853-12c92c0f5637 sum: 164736 positions: 306445,195712,0,9766,272 Row group indices for column 9: - Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0 - Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258 + Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0 + Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:43.64 max UTC: 1969-12-31 06:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258 Row group indices for column 10: - Entry 0: count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0 - Entry 1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272 + Entry 0: count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0 + Entry 1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272 Row group indices for column 11: Entry 0: count: 7140 hasNull: true true: 5115 positions: 0,0,0,0,0,0,0,0 Entry 1: count: 2034 hasNull: true true: 1023 positions: 0,126,98,0,0,520,126,4 @@ -493,11 +493,11 @@ Stripes: Entry 0: count: 6889 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 109415 positions: 0,0,0,0,0,0,0 Entry 1: count: 2284 hasNull: true min: 004J8y max: yjDBo sum: 39719 positions: 0,168,8,0,0,13280,262 Row group indices for column 21: - Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0 - Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258 + Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0 + Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:43.64 max UTC: 1969-12-31 06:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258 Row group indices for column 22: - Entry 0: count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0 - Entry 1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272 + Entry 0: count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0 + Entry 
1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272 Row group indices for column 23: Entry 0: count: 7140 hasNull: true true: 5115 positions: 0,0,0,0,0,0,0,0 Entry 1: count: 2034 hasNull: true true: 1023 positions: 0,126,98,0,0,520,126,4 @@ -529,11 +529,11 @@ Stripes: Entry 0: count: 6889 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 109415 positions: 0,0,0,0,0,0,0 Entry 1: count: 2284 hasNull: true min: 004J8y max: yjDBo sum: 39719 positions: 0,168,8,0,0,13280,262 Row group indices for column 33: - Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0 - Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258 + Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0 + Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:43.64 max UTC: 1969-12-31 06:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258 Row group indices for column 34: - Entry 0: count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0 - Entry 1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272 + Entry 0: count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0 + Entry 1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272 Row group indices for column 35: Entry 0: count: 7140 hasNull: true true: 5115 positions: 0,0,0,0,0,0,0,0 Entry 1: count: 2034 hasNull: true true: 1023 positions: 0,126,98,0,0,520,126,4 @@ -729,11 +729,11 @@ Stripes: Entry 0: count: 10000 hasNull: false min: 000976f7-7075-4f3f-a564-5a375fafcc101416a2b7-7f64-41b7-851f-97d15405037e max: fffd0642-5f01-48cd-8d97-3428faee49e9b39f2b4c-efdc-4e5f-9ab5-4aa5394cb156 sum: 720000 positions: 0,0,0,0,0 Entry 1: count: 2288 hasNull: false min: 00124556-8383-44c4-a28b-7a413de74ccc4137606f-2cf7-43fb-beff-b6d374fd15ec max: ffde3bce-bb56-4fa9-81d7-146ca2eab946225c18e0-0002-4d07-9853-12c92c0f5637 sum: 164736 positions: 306445,195712,0,9766,272 Row group indices for column 9: - Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0 - Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258 + Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0 + Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:43.64 max UTC: 1969-12-31 06:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258 Row group indices for column 10: - Entry 0: count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 
0,0,0,0,0,0,0,0,0,0 - Entry 1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272 + Entry 0: count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0 + Entry 1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272 Row group indices for column 11: Entry 0: count: 7140 hasNull: true true: 5115 positions: 0,0,0,0,0,0,0,0 Entry 1: count: 2034 hasNull: true true: 1023 positions: 0,126,98,0,0,520,126,4 @@ -765,11 +765,11 @@ Stripes: Entry 0: count: 6889 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 109415 positions: 0,0,0,0,0,0,0 Entry 1: count: 2284 hasNull: true min: 004J8y max: yjDBo sum: 39719 positions: 0,168,8,0,0,13280,262 Row group indices for column 21: - Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0 - Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258 + Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0 + Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:43.64 max UTC: 1969-12-31 06:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258 Row group indices for column 22: - Entry 0: count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0 - Entry 1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272 + Entry 0: count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0 + Entry 1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272 Row group indices for column 23: Entry 0: count: 7140 hasNull: true true: 5115 positions: 0,0,0,0,0,0,0,0 Entry 1: count: 2034 hasNull: true true: 1023 positions: 0,126,98,0,0,520,126,4 @@ -801,11 +801,11 @@ Stripes: Entry 0: count: 6889 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 109415 positions: 0,0,0,0,0,0,0 Entry 1: count: 2284 hasNull: true min: 004J8y max: yjDBo sum: 39719 positions: 0,168,8,0,0,13280,262 Row group indices for column 33: - Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0 - Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258 + Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0 + Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:43.64 max UTC: 1969-12-31 06:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258 Row group indices for column 34: - Entry 0: 
count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0 - Entry 1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272 + Entry 0: count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0 + Entry 1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272 Row group indices for column 35: Entry 0: count: 7140 hasNull: true true: 5115 positions: 0,0,0,0,0,0,0,0 Entry 1: count: 2034 hasNull: true true: 1023 positions: 0,126,98,0,0,520,126,4 @@ -813,7 +813,7 @@ Stripes: Entry 0: count: 6889 hasNull: true true: 3402 positions: 0,0,0,0,0,0,0,0 Entry 1: count: 2284 hasNull: true true: 581 positions: 0,168,8,0,0,520,97,1 -File length: 3007982 bytes +File length: 3007984 bytes Padding length: 0 bytes Padding ratio: 0% ________________________________________________________________________________________________________________________ diff --git a/ql/src/test/results/clientpositive/typechangetest.q.out b/ql/src/test/results/clientpositive/typechangetest.q.out index bbb53a0..5ca96a5 100644 --- a/ql/src/test/results/clientpositive/typechangetest.q.out +++ b/ql/src/test/results/clientpositive/typechangetest.q.out @@ -1183,9 +1183,9 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@testaltcolorc_n0 #### A masked pattern was here #### 1 2017-11-07 09:02:49.999999999 -2 1400-01-01 01:01:00.000000001 -3 1400-01-01 01:01:00.000000001 -4 1400-01-01 01:01:00.000000001 +2 1400-01-01 01:01:01.000000001 +3 1400-01-01 01:01:01.000000001 +4 1400-01-01 01:01:01.000000001 PREHOOK: query: select cId, cDecimal, cDouble, cFloat from testAltColORC_n0 order by cId PREHOOK: type: QUERY PREHOOK: Input: default@testaltcolorc_n0 @@ -1259,9 +1259,9 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@testaltcolorc_n0 #### A masked pattern was here #### 1 2017-11-07 09:02:49.999999999 -2 1400-01-01 01:01:00.000000001 -3 1400-01-01 01:01:00.000000001 -4 1400-01-01 01:01:00.000000001 +2 1400-01-01 01:01:01.000000001 +3 1400-01-01 01:01:01.000000001 +4 1400-01-01 01:01:01.000000001 PREHOOK: query: select cId, cDecimal, cDouble, cFloat from testAltColORC_n0 order by cId PREHOOK: type: QUERY PREHOOK: Input: default@testaltcolorc_n0 @@ -1335,9 +1335,9 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@testaltcolorc_n0 #### A masked pattern was here #### 1 2017-11-07 09:02:49.999999999 -2 1400-01-01 01:01:00.000000001 -3 1400-01-01 01:01:00.000000001 -4 1400-01-01 01:01:00.000000001 +2 1400-01-01 01:01:01.000000001 +3 1400-01-01 01:01:01.000000001 +4 1400-01-01 01:01:01.000000001 PREHOOK: query: select cId, cDecimal, cDouble, cFloat from testAltColORC_n0 order by cId PREHOOK: type: QUERY PREHOOK: Input: default@testaltcolorc_n0 @@ -1411,9 +1411,9 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@testaltcolorc_n0 #### A masked pattern was here #### 1 2017-11-07 09:02:49.999999999 -2 1400-01-01 01:01:00.000000001 -3 1400-01-01 01:01:00.000000001 -4 1400-01-01 01:01:00.000000001 +2 1400-01-01 01:01:01.000000001 +3 1400-01-01 01:01:01.000000001 +4 1400-01-01 01:01:01.000000001 PREHOOK: query: select cId, cDecimal, cDouble, cFloat from testAltColORC_n0 order by cId PREHOOK: type: QUERY PREHOOK: Input: default@testaltcolorc_n0 diff --git 
a/ql/src/test/results/clientpositive/vector_case_when_1.q.out b/ql/src/test/results/clientpositive/vector_case_when_1.q.out index b2ac291..66807ac 100644 --- a/ql/src/test/results/clientpositive/vector_case_when_1.q.out +++ b/ql/src/test/results/clientpositive/vector_case_when_1.q.out @@ -199,15 +199,15 @@ STAGE PLANS: Map Operator Tree: TableScan alias: lineitem_test - Statistics: Num rows: 101 Data size: 78920 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: l_quantity (type: int), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE ('Huge number') END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE (null) END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN (null) ELSE (null) END (type: string), if((l_shipmode = 'SHIP '), date_add(l_shipdate, 10), date_add(l_shipdate, 5)) (type: date), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0) END (type: double), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0.0D) END (type: double), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), null, l_tax) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, null) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(12,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(12,2)), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(10,2)), if((l_partkey > 30), CAST( l_receiptdate AS TIMESTAMP), CAST( l_commitdate AS TIMESTAMP)) (type: timestamp), if((l_suppkey > 10000), datediff(l_receiptdate, l_commitdate), null) (type: int), if((l_suppkey > 10000), null, datediff(l_receiptdate, l_commitdate)) (type: int), if(((l_suppkey % 500) > 100), DATE'2009-01-01', DATE'2009-12-31') (type: date) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16 - Statistics: Num rows: 101 Data size: 78920 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 101 Data size: 78920 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: date), _col5 (type: double), _col6 (type: double), _col7 (type: decimal(10,2)), _col8 (type: decimal(10,2)), _col9 (type: decimal(12,2)), _col10 (type: decimal(12,2)), _col11 (type: decimal(10,2)), _col12 (type: decimal(10,2)), _col13 (type: timestamp), _col14 (type: int), _col15 (type: int), _col16 (type: date) Map Vectorization: enabled: true @@ -223,10 +223,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 
(type: string), VALUE._col3 (type: date), VALUE._col4 (type: double), VALUE._col5 (type: double), VALUE._col6 (type: decimal(10,2)), VALUE._col7 (type: decimal(10,2)), VALUE._col8 (type: decimal(12,2)), VALUE._col9 (type: decimal(12,2)), VALUE._col10 (type: decimal(10,2)), VALUE._col11 (type: decimal(10,2)), VALUE._col12 (type: timestamp), VALUE._col13 (type: int), VALUE._col14 (type: int), VALUE._col15 (type: date) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16 - Statistics: Num rows: 101 Data size: 78920 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 101 Data size: 78920 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -525,7 +525,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: lineitem_test - Statistics: Num rows: 101 Data size: 78920 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true vectorizationSchemaColumns: [0:l_orderkey:int, 1:l_partkey:int, 2:l_suppkey:int, 3:l_linenumber:int, 4:l_quantity:int, 5:l_extendedprice:double, 6:l_discount:double, 7:l_tax:decimal(10,2), 8:l_returnflag:char(1), 9:l_linestatus:char(1), 10:l_shipdate:date, 11:l_commitdate:date, 12:l_receiptdate:date, 13:l_shipinstruct:varchar(20), 14:l_shipmode:char(10), 15:l_comment:string, 16:ROW__ID:struct] @@ -537,7 +537,7 @@ STAGE PLANS: native: true projectedOutputColumnNums: [4, 22, 24, 25, 26, 27, 28, 30, 31, 32, 33, 34, 35, 38, 40, 43, 44] selectExpressions: IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 21:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 22:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val Somecol 21:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprStringScalarStringScalar(col 20:boolean, val Many, val Huge number)(children: LongColLessLongScalar(col 4:int, val 100) -> 20:boolean) -> 21:string) -> 22:string) -> 21:string) -> 22:string, IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 23:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 24:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val Somecol 23:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprColumnNull(col 20:boolean, col 21:string, null)(children: LongColLessLongScalar(col 4:int, val 100) -> 20:boolean, ConstantVectorExpression(val Many) -> 21:string) -> 23:string) -> 24:string) -> 23:string) -> 24:string, IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 23:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 25:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val 
Somecol 23:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprNullNull(null, null) -> 23:string) -> 25:string) -> 23:string) -> 25:string, IfExprLongColumnLongColumn(col 17:boolean, col 18:date, col 19:date)(children: StringGroupColEqualCharScalar(col 14:char(10), val SHIP) -> 17:boolean, VectorUDFDateAddColScalar(col 10:date, val 10) -> 18:date, VectorUDFDateAddColScalar(col 10:date, val 5) -> 19:date) -> 26:date, IfExprDoubleColumnLongScalar(col 17:boolean, col 28:double, val 0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 17:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 27:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 27:double) -> 28:double) -> 27:double, IfExprDoubleColumnDoubleScalar(col 17:boolean, col 29:double, val 0.0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 17:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 28:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 28:double) -> 29:double) -> 28:double, IfExprNullColumn(col 17:boolean, null, col 7)(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 17:boolean, col 7:decimal(10,2)) -> 30:decimal(10,2), IfExprColumnNull(col 18:boolean, col 7:decimal(10,2), null)(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 18:boolean, col 7:decimal(10,2)) -> 31:decimal(10,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 32:decimal(12,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 33:decimal(12,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 34:decimal(10,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 35:decimal(10,2), IfExprTimestampColumnColumn(col 19:boolean, col 36:timestampcol 37:timestamp)(children: LongColGreaterLongScalar(col 1:int, val 30) -> 19:boolean, CastDateToTimestamp(col 12:date) -> 36:timestamp, CastDateToTimestamp(col 11:date) -> 37:timestamp) -> 38:timestamp, IfExprColumnNull(col 19:boolean, col 39:int, null)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 19:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 39:int) -> 40:int, IfExprNullColumn(col 41:boolean, null, col 42)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 41:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 42:int) -> 43:int, IfExprLongScalarLongScalar(col 45:boolean, val 14245, val 14609)(children: LongColGreaterLongScalar(col 44:int, val 100)(children: LongColModuloLongScalar(col 2:int, val 500) -> 44:int) -> 45:boolean) -> 44:date - Statistics: Num rows: 101 Data 
size: 78920 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + @@ -546,7 +546,7 @@ STAGE PLANS: native: false nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false - Statistics: Num rows: 101 Data size: 78920 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: date), _col5 (type: double), _col6 (type: double), _col7 (type: decimal(10,2)), _col8 (type: decimal(10,2)), _col9 (type: decimal(12,2)), _col10 (type: decimal(12,2)), _col11 (type: decimal(10,2)), _col12 (type: decimal(10,2)), _col13 (type: timestamp), _col14 (type: int), _col15 (type: int), _col16 (type: date) Execution mode: vectorized Map Vectorization: @@ -572,10 +572,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: date), VALUE._col4 (type: double), VALUE._col5 (type: double), VALUE._col6 (type: decimal(10,2)), VALUE._col7 (type: decimal(10,2)), VALUE._col8 (type: decimal(12,2)), VALUE._col9 (type: decimal(12,2)), VALUE._col10 (type: decimal(10,2)), VALUE._col11 (type: decimal(10,2)), VALUE._col12 (type: timestamp), VALUE._col13 (type: int), VALUE._col14 (type: int), VALUE._col15 (type: date) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16 - Statistics: Num rows: 101 Data size: 78920 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 101 Data size: 78920 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -874,7 +874,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: lineitem_test - Statistics: Num rows: 101 Data size: 78920 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true vectorizationSchemaColumns: [0:l_orderkey:int, 1:l_partkey:int, 2:l_suppkey:int, 3:l_linenumber:int, 4:l_quantity:int, 5:l_extendedprice:double, 6:l_discount:double, 7:l_tax:decimal(10,2), 8:l_returnflag:char(1), 9:l_linestatus:char(1), 10:l_shipdate:date, 11:l_commitdate:date, 12:l_receiptdate:date, 13:l_shipinstruct:varchar(20), 14:l_shipmode:char(10), 15:l_comment:string, 16:ROW__ID:struct] @@ -886,7 +886,7 @@ STAGE PLANS: native: true projectedOutputColumnNums: [4, 27, 38, 48, 52, 54, 60, 63, 65, 67, 68, 69, 70, 73, 76, 79, 80] selectExpressions: IfExprColumnCondExpr(col 17:boolean, col 18:stringcol 26:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, ConstantVectorExpression(val Single) -> 18:string, IfExprColumnCondExpr(col 19:boolean, col 20:stringcol 25:string)(children: LongColEqualLongScalar(col 
4:int, val 2) -> 19:boolean, ConstantVectorExpression(val Two) -> 20:string, IfExprColumnCondExpr(col 21:boolean, col 22:stringcol 24:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 21:boolean, ConstantVectorExpression(val Some) -> 22:string, IfExprStringScalarStringScalar(col 23:boolean, val Many, val Huge number)(children: LongColLessLongScalar(col 4:int, val 100) -> 23:boolean) -> 24:string) -> 25:string) -> 26:string) -> 27:string, IfExprColumnCondExpr(col 23:boolean, col 28:stringcol 37:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 23:boolean, ConstantVectorExpression(val Single) -> 28:string, IfExprColumnCondExpr(col 29:boolean, col 30:stringcol 36:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 29:boolean, ConstantVectorExpression(val Two) -> 30:string, IfExprColumnCondExpr(col 31:boolean, col 32:stringcol 35:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 31:boolean, ConstantVectorExpression(val Some) -> 32:string, IfExprColumnNull(col 33:boolean, col 34:string, null)(children: LongColLessLongScalar(col 4:int, val 100) -> 33:boolean, ConstantVectorExpression(val Many) -> 34:string) -> 35:string) -> 36:string) -> 37:string) -> 38:string, IfExprColumnCondExpr(col 39:boolean, col 40:stringcol 47:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 39:boolean, ConstantVectorExpression(val Single) -> 40:string, IfExprColumnCondExpr(col 41:boolean, col 42:stringcol 46:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 41:boolean, ConstantVectorExpression(val Two) -> 42:string, IfExprColumnCondExpr(col 43:boolean, col 44:stringcol 45:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 43:boolean, ConstantVectorExpression(val Some) -> 44:string, IfExprNullNull(null, null) -> 45:string) -> 46:string) -> 47:string) -> 48:string, IfExprCondExprCondExpr(col 49:boolean, col 50:datecol 51:date)(children: StringGroupColEqualCharScalar(col 14:char(10), val SHIP) -> 49:boolean, VectorUDFDateAddColScalar(col 10:date, val 10) -> 50:date, VectorUDFDateAddColScalar(col 10:date, val 5) -> 51:date) -> 52:date, IfExprDoubleColumnLongScalar(col 57:boolean, col 58:double, val 0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 57:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 54:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 54:double) -> 58:double) -> 54:double, IfExprCondExprColumn(col 57:boolean, col 59:double, col 58:double)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 57:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 58:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 58:double) -> 59:double, ConstantVectorExpression(val 0.0) -> 58:double) -> 60:double, IfExprNullColumn(col 62:boolean, null, col 7)(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 62:boolean, col 7:decimal(10,2)) -> 63:decimal(10,2), IfExprColumnNull(col 64:boolean, col 7:decimal(10,2), null)(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 64:boolean, col 7:decimal(10,2)) -> 65:decimal(10,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 
67:decimal(12,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 68:decimal(12,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 69:decimal(10,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 70:decimal(10,2), IfExprCondExprCondExpr(col 66:boolean, col 71:timestampcol 72:timestamp)(children: LongColGreaterLongScalar(col 1:int, val 30) -> 66:boolean, CastDateToTimestamp(col 12:date) -> 71:timestamp, CastDateToTimestamp(col 11:date) -> 72:timestamp) -> 73:timestamp, IfExprCondExprNull(col 74:boolean, col 75:int, null)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 74:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 75:int) -> 76:int, IfExprNullCondExpr(col 77:boolean, null, col 78:int)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 77:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 78:int) -> 79:int, IfExprLongScalarLongScalar(col 81:boolean, val 14245, val 14609)(children: LongColGreaterLongScalar(col 80:int, val 100)(children: LongColModuloLongScalar(col 2:int, val 500) -> 80:int) -> 81:boolean) -> 80:date - Statistics: Num rows: 101 Data size: 78920 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + @@ -895,7 +895,7 @@ STAGE PLANS: native: false nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false - Statistics: Num rows: 101 Data size: 78920 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: date), _col5 (type: double), _col6 (type: double), _col7 (type: decimal(10,2)), _col8 (type: decimal(10,2)), _col9 (type: decimal(12,2)), _col10 (type: decimal(12,2)), _col11 (type: decimal(10,2)), _col12 (type: decimal(10,2)), _col13 (type: timestamp), _col14 (type: int), _col15 (type: int), _col16 (type: date) Execution mode: vectorized Map Vectorization: @@ -921,10 +921,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: date), VALUE._col4 (type: double), VALUE._col5 (type: double), VALUE._col6 (type: decimal(10,2)), VALUE._col7 (type: decimal(10,2)), VALUE._col8 (type: decimal(12,2)), VALUE._col9 (type: decimal(12,2)), VALUE._col10 (type: decimal(10,2)), VALUE._col11 (type: decimal(10,2)), VALUE._col12 (type: timestamp), VALUE._col13 (type: int), VALUE._col14 (type: int), VALUE._col15 (type: date) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, 
_col10, _col11, _col12, _col13, _col14, _col15, _col16 - Statistics: Num rows: 101 Data size: 78920 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 101 Data size: 78920 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/vector_case_when_2.q.out b/ql/src/test/results/clientpositive/vector_case_when_2.q.out index 159c983..b8a5214 100644 --- a/ql/src/test/results/clientpositive/vector_case_when_2.q.out +++ b/ql/src/test/results/clientpositive/vector_case_when_2.q.out @@ -129,15 +129,15 @@ STAGE PLANS: Map Operator Tree: TableScan alias: timestamps - Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), CASE WHEN ((ctimestamp2 <= TIMESTAMP'1800-12-31 00:00:00.0')) THEN ('1800s or Earlier') WHEN ((ctimestamp2 < TIMESTAMP'1900-01-01 00:00:00.0')) THEN ('1900s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE ('Unknown') END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00.0')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE (null) END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00.0')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN (null) ELSE (null) END (type: string), if((ctimestamp1 < TIMESTAMP'1974-10-04 17:21:03.989'), year(ctimestamp1), year(ctimestamp2)) (type: int), CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59.0') END (type: string), if((ctimestamp1 = TIMESTAMP'2021-09-24 03:18:32.413655165'), null, minute(ctimestamp1)) (type: int), if(((ctimestamp2 >= TIMESTAMP'5344-10-04 18:40:08.165') and (ctimestamp2 < TIMESTAMP'6631-11-13 16:31:29.702202248')), minute(ctimestamp1), null) (type: int), if(((UDFToDouble(ctimestamp1) % 500.0D) > 100.0D), date_add(cdate, 1), date_add(cdate, 365)) (type: date), stimestamp1 (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 - Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: timestamp), _col10 (type: string), _col1 (type: timestamp) sort order: +++ - Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 51 Data 
size: 12300 Basic stats: COMPLETE Column stats: NONE value expressions: _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: int), _col8 (type: int), _col9 (type: date) Map Vectorization: enabled: true @@ -153,10 +153,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: timestamp), KEY.reducesinkkey2 (type: timestamp), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: int), VALUE._col4 (type: string), VALUE._col5 (type: int), VALUE._col6 (type: int), VALUE._col7 (type: date) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9 - Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -365,7 +365,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: timestamps - Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true vectorizationSchemaColumns: [0:cdate:date, 1:ctimestamp1:timestamp, 2:stimestamp1:string, 3:ctimestamp2:timestamp, 4:ROW__ID:struct] @@ -377,7 +377,7 @@ STAGE PLANS: native: true projectedOutputColumnNums: [1, 3, 10, 12, 13, 14, 11, 7, 16, 23, 2] selectExpressions: IfExprStringScalarStringGroupColumn(col 5:boolean, val 1800s or Earliercol 9:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 1800-12-31 00:00:00.0) -> 5:boolean, IfExprStringScalarStringGroupColumn(col 6:boolean, val 1900scol 10:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 1900-01-01 00:00:00.0) -> 6:boolean, IfExprStringScalarStringGroupColumn(col 7:boolean, val Late 2000scol 9:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 7:boolean, IfExprStringScalarStringScalar(col 8:boolean, val Early 2010s, val Unknown)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 23:59:59.999999999) -> 8:boolean) -> 9:string) -> 10:string) -> 9:string) -> 10:string, IfExprStringScalarStringGroupColumn(col 5:boolean, val Oldcol 11:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2000-12-31 23:59:59.999999999) -> 5:boolean, IfExprStringScalarStringGroupColumn(col 6:boolean, val Early 2000scol 12:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 2006-01-01 00:00:00.0) -> 6:boolean, IfExprStringScalarStringGroupColumn(col 7:boolean, val Late 2000scol 11:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 7:boolean, IfExprColumnNull(col 8:boolean, col 9:string, null)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 23:59:59.999999999) -> 8:boolean, ConstantVectorExpression(val Early 2010s) -> 9:string) -> 11:string) -> 12:string) -> 11:string) -> 12:string, IfExprStringScalarStringGroupColumn(col 5:boolean, val Oldcol 
11:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2000-12-31 23:59:59.999999999) -> 5:boolean, IfExprStringScalarStringGroupColumn(col 6:boolean, val Early 2000scol 13:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 2006-01-01 00:00:00.0) -> 6:boolean, IfExprStringScalarStringGroupColumn(col 7:boolean, val Late 2000scol 11:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 7:boolean, IfExprNullNull(null, null) -> 11:string) -> 13:string) -> 11:string) -> 13:string, IfExprLongColumnLongColumn(col 5:boolean, col 6:int, col 7:int)(children: TimestampColLessTimestampScalar(col 1:timestamp, val 1974-10-04 17:21:03.989) -> 5:boolean, VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 6:int, VectorUDFYearTimestamp(col 3:timestamp, field YEAR) -> 7:int) -> 14:int, VectorUDFAdaptor(CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59.0') END)(children: SelectStringColLikeStringScalar(col 2:string) -> 5:boolean) -> 11:string, IfExprNullColumn(col 5:boolean, null, col 6)(children: TimestampColEqualTimestampScalar(col 1:timestamp, val 2021-09-24 03:18:32.413655165) -> 5:boolean, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 6:int) -> 7:int, IfExprColumnNull(col 17:boolean, col 15:int, null)(children: ColAndCol(col 15:boolean, col 16:boolean)(children: TimestampColGreaterEqualTimestampScalar(col 3:timestamp, val 5344-10-04 18:40:08.165) -> 15:boolean, TimestampColLessTimestampScalar(col 3:timestamp, val 6631-11-13 16:31:29.702202248) -> 16:boolean) -> 17:boolean, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 15:int) -> 16:int, IfExprLongColumnLongColumn(col 20:boolean, col 21:date, col 22:date)(children: DoubleColGreaterDoubleScalar(col 19:double, val 100.0)(children: DoubleColModuloDoubleScalar(col 18:double, val 500.0)(children: CastTimestampToDouble(col 1:timestamp) -> 18:double) -> 19:double) -> 20:boolean, VectorUDFDateAddColScalar(col 0:date, val 1) -> 21:date, VectorUDFDateAddColScalar(col 0:date, val 365) -> 22:date) -> 23:date - Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: timestamp), _col10 (type: string), _col1 (type: timestamp) sort order: +++ @@ -386,7 +386,7 @@ STAGE PLANS: native: false nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false - Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE value expressions: _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: int), _col8 (type: int), _col9 (type: date) Execution mode: vectorized Map Vectorization: @@ -412,10 +412,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: timestamp), KEY.reducesinkkey2 (type: timestamp), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: int), VALUE._col4 (type: string), VALUE._col5 (type: int), VALUE._col6 (type: int), VALUE._col7 (type: date) 
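
The selectExpressions in these plans replace row-mode VectorUDFAdaptor calls with typed conditionals such as IfExprColumnNull, IfExprNullColumn, and IfExprColumnCondExpr wherever a native implementation exists. As a minimal sketch of what an if(cond, thenCol, null) conditional does to one batch — assuming long-typed inputs, and eliding the input null handling and repeating-output fast paths the real expressions have (the class and method names below are illustrative, not Hive's):

import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

// Illustrative sketch only; not the actual IfExprColumnNull implementation.
public class IfExprColumnNullSketch {
  // Writes if(cond, then, null) into out for every live row in the batch:
  // rows where the boolean column is 1 copy the "then" value, others become NULL.
  public static void evaluate(VectorizedRowBatch batch, int condIdx, int thenIdx, int outIdx) {
    LongColumnVector cond = (LongColumnVector) batch.cols[condIdx];
    LongColumnVector thenCol = (LongColumnVector) batch.cols[thenIdx];
    LongColumnVector out = (LongColumnVector) batch.cols[outIdx];
    out.noNulls = false;       // some output rows may be NULL
    out.isRepeating = false;
    for (int j = 0; j < batch.size; j++) {
      // The batch may carry a selection vector of live row indices.
      int i = batch.selectedInUse ? batch.selected[j] : j;
      int ci = cond.isRepeating ? 0 : i;    // repeating vectors store row 0 only
      int ti = thenCol.isRepeating ? 0 : i;
      if (cond.vector[ci] == 1) {
        out.isNull[i] = false;
        out.vector[i] = thenCol.vector[ti];
      } else {
        out.isNull[i] = true;
      }
    }
  }
}

The adaptor path, by contrast, materializes each selected row into writables and invokes the original row-mode UDF, which is why the rewritten plans prefer the typed IfExpr* forms where one exists.
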
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9 - Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -624,7 +624,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: timestamps - Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true vectorizationSchemaColumns: [0:cdate:date, 1:ctimestamp1:timestamp, 2:stimestamp1:string, 3:ctimestamp2:timestamp, 4:ROW__ID:struct] @@ -636,7 +636,7 @@ STAGE PLANS: native: true projectedOutputColumnNums: [1, 3, 15, 26, 36, 40, 42, 44, 46, 53, 2] selectExpressions: IfExprColumnCondExpr(col 5:boolean, col 6:stringcol 14:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 1800-12-31 00:00:00.0) -> 5:boolean, ConstantVectorExpression(val 1800s or Earlier) -> 6:string, IfExprColumnCondExpr(col 7:boolean, col 8:stringcol 13:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 1900-01-01 00:00:00.0) -> 7:boolean, ConstantVectorExpression(val 1900s) -> 8:string, IfExprColumnCondExpr(col 9:boolean, col 10:stringcol 12:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 9:boolean, ConstantVectorExpression(val Late 2000s) -> 10:string, IfExprStringScalarStringScalar(col 11:boolean, val Early 2010s, val Unknown)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 23:59:59.999999999) -> 11:boolean) -> 12:string) -> 13:string) -> 14:string) -> 15:string, IfExprColumnCondExpr(col 11:boolean, col 16:stringcol 25:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2000-12-31 23:59:59.999999999) -> 11:boolean, ConstantVectorExpression(val Old) -> 16:string, IfExprColumnCondExpr(col 17:boolean, col 18:stringcol 24:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 2006-01-01 00:00:00.0) -> 17:boolean, ConstantVectorExpression(val Early 2000s) -> 18:string, IfExprColumnCondExpr(col 19:boolean, col 20:stringcol 23:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 19:boolean, ConstantVectorExpression(val Late 2000s) -> 20:string, IfExprColumnNull(col 21:boolean, col 22:string, null)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 23:59:59.999999999) -> 21:boolean, ConstantVectorExpression(val Early 2010s) -> 22:string) -> 23:string) -> 24:string) -> 25:string) -> 26:string, IfExprColumnCondExpr(col 27:boolean, col 28:stringcol 35:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2000-12-31 23:59:59.999999999) -> 27:boolean, ConstantVectorExpression(val Old) -> 28:string, IfExprColumnCondExpr(col 29:boolean, col 30:stringcol 34:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 2006-01-01 00:00:00.0) -> 29:boolean, ConstantVectorExpression(val Early 2000s) -> 30:string, 
IfExprColumnCondExpr(col 31:boolean, col 32:stringcol 33:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 31:boolean, ConstantVectorExpression(val Late 2000s) -> 32:string, IfExprNullNull(null, null) -> 33:string) -> 34:string) -> 35:string) -> 36:string, IfExprCondExprCondExpr(col 37:boolean, col 38:intcol 39:int)(children: TimestampColLessTimestampScalar(col 1:timestamp, val 1974-10-04 17:21:03.989) -> 37:boolean, VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 38:int, VectorUDFYearTimestamp(col 3:timestamp, field YEAR) -> 39:int) -> 40:int, VectorUDFAdaptor(CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59.0') END)(children: SelectStringColLikeStringScalar(col 2:string) -> 41:boolean) -> 42:string, IfExprNullCondExpr(col 41:boolean, null, col 43:int)(children: TimestampColEqualTimestampScalar(col 1:timestamp, val 2021-09-24 03:18:32.413655165) -> 41:boolean, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 43:int) -> 44:int, IfExprCondExprNull(col 47:boolean, col 45:int, null)(children: ColAndCol(col 45:boolean, col 46:boolean)(children: TimestampColGreaterEqualTimestampScalar(col 3:timestamp, val 5344-10-04 18:40:08.165) -> 45:boolean, TimestampColLessTimestampScalar(col 3:timestamp, val 6631-11-13 16:31:29.702202248) -> 46:boolean) -> 47:boolean, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 45:int) -> 46:int, IfExprCondExprCondExpr(col 50:boolean, col 51:datecol 52:date)(children: DoubleColGreaterDoubleScalar(col 49:double, val 100.0)(children: DoubleColModuloDoubleScalar(col 48:double, val 500.0)(children: CastTimestampToDouble(col 1:timestamp) -> 48:double) -> 49:double) -> 50:boolean, VectorUDFDateAddColScalar(col 0:date, val 1) -> 51:date, VectorUDFDateAddColScalar(col 0:date, val 365) -> 52:date) -> 53:date - Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: timestamp), _col10 (type: string), _col1 (type: timestamp) sort order: +++ @@ -645,7 +645,7 @@ STAGE PLANS: native: false nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false - Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE value expressions: _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: int), _col8 (type: int), _col9 (type: date) Execution mode: vectorized Map Vectorization: @@ -671,10 +671,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: timestamp), KEY.reducesinkkey2 (type: timestamp), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: int), VALUE._col4 (type: string), VALUE._col5 (type: int), VALUE._col6 (type: int), VALUE._col7 (type: date) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9 - Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE File Output 
Operator compressed: false - Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/vector_char_2.q.out b/ql/src/test/results/clientpositive/vector_char_2.q.out index ae9910d..97038ee 100644 --- a/ql/src/test/results/clientpositive/vector_char_2.q.out +++ b/ql/src/test/results/clientpositive/vector_char_2.q.out @@ -84,7 +84,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: char_2_n0 - Statistics: Num rows: 501 Data size: 99168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 501 Data size: 99000 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -95,7 +95,7 @@ STAGE PLANS: native: true projectedOutputColumnNums: [1, 3] selectExpressions: CastStringToLong(col 0:char(10)) -> 3:int - Statistics: Num rows: 501 Data size: 99168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 501 Data size: 99000 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(_col1), count() Group By Vectorization: @@ -109,7 +109,7 @@ STAGE PLANS: keys: _col0 (type: char(20)) mode: hash outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 501 Data size: 99168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 501 Data size: 99000 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: char(20)) sort order: + @@ -119,7 +119,7 @@ STAGE PLANS: native: false nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false - Statistics: Num rows: 501 Data size: 99168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 501 Data size: 99000 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col1 (type: bigint), _col2 (type: bigint) Execution mode: vectorized @@ -142,7 +142,7 @@ STAGE PLANS: keys: KEY._col0 (type: char(20)) mode: mergepartial outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 250 Data size: 49485 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 250 Data size: 49401 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -164,7 +164,7 @@ STAGE PLANS: native: false nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false - Statistics: Num rows: 250 Data size: 49485 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 250 Data size: 49401 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col1 (type: bigint), _col2 (type: bigint) Execution mode: vectorized @@ -185,7 +185,7 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: char(20)), VALUE._col0 (type: bigint), VALUE._col1 (type: bigint) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 250 Data size: 49485 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 250 Data size: 
49401 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 5 Statistics: Num rows: 5 Data size: 985 Basic stats: COMPLETE Column stats: NONE @@ -272,7 +272,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: char_2_n0 - Statistics: Num rows: 501 Data size: 99168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 501 Data size: 99000 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -283,7 +283,7 @@ STAGE PLANS: native: true projectedOutputColumnNums: [1, 3] selectExpressions: CastStringToLong(col 0:char(10)) -> 3:int - Statistics: Num rows: 501 Data size: 99168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 501 Data size: 99000 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(_col1), count() Group By Vectorization: @@ -297,7 +297,7 @@ STAGE PLANS: keys: _col0 (type: char(20)) mode: hash outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 501 Data size: 99168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 501 Data size: 99000 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: char(20)) sort order: - @@ -307,7 +307,7 @@ STAGE PLANS: native: false nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false - Statistics: Num rows: 501 Data size: 99168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 501 Data size: 99000 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col1 (type: bigint), _col2 (type: bigint) Execution mode: vectorized @@ -330,7 +330,7 @@ STAGE PLANS: keys: KEY._col0 (type: char(20)) mode: mergepartial outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 250 Data size: 49485 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 250 Data size: 49401 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -352,7 +352,7 @@ STAGE PLANS: native: false nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false - Statistics: Num rows: 250 Data size: 49485 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 250 Data size: 49401 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col1 (type: bigint), _col2 (type: bigint) Execution mode: vectorized @@ -373,7 +373,7 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: char(20)), VALUE._col0 (type: bigint), VALUE._col1 (type: bigint) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 250 Data size: 49485 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 250 Data size: 49401 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 5 Statistics: Num rows: 5 Data size: 985 Basic stats: COMPLETE Column stats: NONE diff --git a/ql/src/test/results/clientpositive/vector_coalesce_2.q.out b/ql/src/test/results/clientpositive/vector_coalesce_2.q.out index 48d38c3..c42d295 100644 --- a/ql/src/test/results/clientpositive/vector_coalesce_2.q.out +++ b/ql/src/test/results/clientpositive/vector_coalesce_2.q.out @@ -52,7 +52,7 @@ 
STAGE PLANS: Map Operator Tree: TableScan alias: str_str_orc - Statistics: Num rows: 5 Data size: 678 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 510 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -63,7 +63,7 @@ STAGE PLANS: native: true projectedOutputColumnNums: [1, 5] selectExpressions: CastStringToLong(col 4:string)(children: VectorCoalesce(columns [0, 3])(children: col 0:string, ConstantVectorExpression(val 0) -> 3:string) -> 4:string) -> 5:int - Statistics: Num rows: 5 Data size: 678 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 510 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(_col1) Group By Vectorization: @@ -77,7 +77,7 @@ STAGE PLANS: keys: _col0 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 678 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 510 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + @@ -87,7 +87,7 @@ STAGE PLANS: native: false nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false - Statistics: Num rows: 5 Data size: 678 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 510 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Execution mode: vectorized Map Vectorization: @@ -109,14 +109,14 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 2 Data size: 271 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), round((UDFToDouble(_col1) / 60.0D), 2) (type: double) outputColumnNames: _col0, _col1 - Statistics: Num rows: 2 Data size: 271 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 2 Data size: 271 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -167,7 +167,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: str_str_orc - Statistics: Num rows: 5 Data size: 678 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 510 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -178,13 +178,13 @@ STAGE PLANS: native: true projectedOutputColumnNums: [4] selectExpressions: VectorCoalesce(columns [0, 3])(children: col 0:string, ConstantVectorExpression(val 0) -> 3:string) -> 4:string - Statistics: Num rows: 5 Data size: 678 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 510 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 5 Data size: 678 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 510 Basic stats: COMPLETE Column stats: 
NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -247,7 +247,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: str_str_orc - Statistics: Num rows: 5 Data size: 678 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 510 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -258,7 +258,7 @@ STAGE PLANS: native: true projectedOutputColumnNums: [1, 5] selectExpressions: CastStringToLong(col 4:string)(children: VectorCoalesce(columns [0, 3])(children: col 0:string, ConstantVectorExpression(val 0) -> 3:string) -> 4:string) -> 5:int - Statistics: Num rows: 5 Data size: 678 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 510 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(_col1) Group By Vectorization: @@ -272,7 +272,7 @@ STAGE PLANS: keys: _col0 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 678 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 510 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + @@ -282,7 +282,7 @@ STAGE PLANS: native: false nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false - Statistics: Num rows: 5 Data size: 678 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 510 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Execution mode: vectorized Map Vectorization: @@ -304,14 +304,14 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 2 Data size: 271 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), round((UDFToDouble(_col1) / 60.0D), 2) (type: double) outputColumnNames: _col0, _col1 - Statistics: Num rows: 2 Data size: 271 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 2 Data size: 271 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -362,7 +362,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: str_str_orc - Statistics: Num rows: 5 Data size: 678 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 510 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -373,13 +373,13 @@ STAGE PLANS: native: true projectedOutputColumnNums: [4] selectExpressions: VectorCoalesce(columns [0, 3])(children: col 0:string, ConstantVectorExpression(val 0) -> 3:string) -> 4:string - Statistics: Num rows: 5 Data size: 678 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 510 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator 
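
In the vector_coalesce_2 plans, COALESCE(str, '0') folds into a single VectorCoalesce over the source column and a ConstantVectorExpression scratch column. A rough sketch of first-non-null selection over two string columns in a batch — again an illustrative stand-in (hypothetical StringCoalesceSketch), eliding the isRepeating/noNulls fast paths and the n-ary input list of the real expression:

import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

// Illustrative sketch only; the real VectorCoalesce handles any column type
// and an arbitrary list of input columns.
public class StringCoalesceSketch {
  // out[i] = first non-NULL of (a[i], b[i]); NULL only when both are NULL.
  public static void coalesce(VectorizedRowBatch batch, int aIdx, int bIdx, int outIdx) {
    BytesColumnVector a = (BytesColumnVector) batch.cols[aIdx];
    BytesColumnVector b = (BytesColumnVector) batch.cols[bIdx];
    BytesColumnVector out = (BytesColumnVector) batch.cols[outIdx];
    out.init();                // allocate output byte buffers
    out.noNulls = false;
    out.isRepeating = false;
    for (int j = 0; j < batch.size; j++) {
      int i = batch.selectedInUse ? batch.selected[j] : j;
      // Prefer a when it is non-null on this row; otherwise fall back to b.
      BytesColumnVector src = (a.noNulls || !a.isNull[a.isRepeating ? 0 : i]) ? a : b;
      int si = src.isRepeating ? 0 : i;
      if (src.noNulls || !src.isNull[si]) {
        out.isNull[i] = false;
        out.setVal(i, src.vector[si], src.start[si], src.length[si]);
      } else {
        out.isNull[i] = true;  // both inputs NULL on this row
      }
    }
  }
}

In this query the second input is the constant '0', so the scratch column is effectively a repeating non-null constant and the COALESCE output can never be NULL.
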
native: false - Statistics: Num rows: 5 Data size: 678 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 510 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/vector_data_types.q.out b/ql/src/test/results/clientpositive/vector_data_types.q.out index c2a2fce..b72340d 100644 --- a/ql/src/test/results/clientpositive/vector_data_types.q.out +++ b/ql/src/test/results/clientpositive/vector_data_types.q.out @@ -128,22 +128,22 @@ STAGE PLANS: Map Operator Tree: TableScan alias: over1korc_n1 - Statistics: Num rows: 1050 Data size: 311254 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1050 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: t (type: tinyint), si (type: smallint), i (type: int), b (type: bigint), f (type: float), d (type: double), bo (type: boolean), s (type: string), ts (type: timestamp), dec (type: decimal(4,2)), bin (type: binary) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 - Statistics: Num rows: 1050 Data size: 311254 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1050 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: int) sort order: +++ - Statistics: Num rows: 1050 Data size: 311254 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1050 Data size: 311170 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: boolean), _col7 (type: string), _col8 (type: timestamp), _col9 (type: decimal(4,2)), _col10 (type: binary) Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: tinyint), KEY.reducesinkkey1 (type: smallint), KEY.reducesinkkey2 (type: int), VALUE._col0 (type: bigint), VALUE._col1 (type: float), VALUE._col2 (type: double), VALUE._col3 (type: boolean), VALUE._col4 (type: string), VALUE._col5 (type: timestamp), VALUE._col6 (type: decimal(4,2)), VALUE._col7 (type: binary) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 - Statistics: Num rows: 1050 Data size: 311254 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1050 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 20 Statistics: Num rows: 20 Data size: 5920 Basic stats: COMPLETE Column stats: NONE @@ -218,7 +218,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: over1korc_n1 - Statistics: Num rows: 1050 Data size: 311254 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1050 Data size: 311170 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -228,7 +228,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] - Statistics: Num rows: 1050 Data size: 311254 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1050 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: int) sort order: +++ @@ -237,7 +237,7 @@ STAGE PLANS: native: false nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS 
true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false - Statistics: Num rows: 1050 Data size: 311254 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1050 Data size: 311170 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: boolean), _col7 (type: string), _col8 (type: timestamp), _col9 (type: decimal(4,2)), _col10 (type: binary) Execution mode: vectorized @@ -258,7 +258,7 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: tinyint), KEY.reducesinkkey1 (type: smallint), KEY.reducesinkkey2 (type: int), VALUE._col0 (type: bigint), VALUE._col1 (type: float), VALUE._col2 (type: double), VALUE._col3 (type: boolean), VALUE._col4 (type: string), VALUE._col5 (type: timestamp), VALUE._col6 (type: decimal(4,2)), VALUE._col7 (type: binary) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 - Statistics: Num rows: 1050 Data size: 311254 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1050 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 20 Statistics: Num rows: 20 Data size: 5920 Basic stats: COMPLETE Column stats: NONE @@ -326,7 +326,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: over1korc_n1 - Statistics: Num rows: 1050 Data size: 311254 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1050 Data size: 311170 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -337,7 +337,7 @@ STAGE PLANS: native: true projectedOutputColumnNums: [12] selectExpressions: VectorUDFAdaptor(hash(t,si,i,b,f,d,bo,s,ts,dec,bin)) -> 12:int - Statistics: Num rows: 1050 Data size: 311254 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1050 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(_col0) Group By Vectorization: diff --git a/ql/src/test/results/clientpositive/vector_interval_1.q.out b/ql/src/test/results/clientpositive/vector_interval_1.q.out index 8c0086e..70b7c66 100644 --- a/ql/src/test/results/clientpositive/vector_interval_1.q.out +++ b/ql/src/test/results/clientpositive/vector_interval_1.q.out @@ -80,7 +80,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: vector_interval_1 - Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -91,7 +91,7 @@ STAGE PLANS: native: true projectedOutputColumnNums: [2, 5, 6] selectExpressions: CastStringToIntervalYearMonth(col 2:string) -> 5:interval_year_month, CastStringToIntervalDayTime(col 3:string) -> 6:interval_day_time - Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + @@ -100,7 +100,7 @@ STAGE PLANS: native: false nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false - Statistics: Num rows: 2 Data 
size: 442 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: interval_year_month), _col2 (type: interval_day_time) Execution mode: vectorized Map Vectorization: @@ -120,10 +120,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: string), INTERVAL'1-2' (type: interval_year_month), VALUE._col0 (type: interval_year_month), INTERVAL'1 02:03:04.000000000' (type: interval_day_time), VALUE._col1 (type: interval_day_time) outputColumnNames: _col0, _col1, _col2, _col3, _col4 - Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -191,7 +191,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: vector_interval_1 - Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -202,7 +202,7 @@ STAGE PLANS: native: true projectedOutputColumnNums: [1, 7, 6, 9, 8] selectExpressions: IntervalYearMonthColAddIntervalYearMonthColumn(col 5:interval_year_month, col 6:interval_year_month)(children: CastStringToIntervalYearMonth(col 2:string) -> 5:interval_year_month, CastStringToIntervalYearMonth(col 2:string) -> 6:interval_year_month) -> 7:interval_year_month, IntervalYearMonthScalarAddIntervalYearMonthColumn(val 14, col 5:interval_year_month)(children: CastStringToIntervalYearMonth(col 2:string) -> 5:interval_year_month) -> 6:interval_year_month, IntervalYearMonthColSubtractIntervalYearMonthColumn(col 5:interval_year_month, col 8:interval_year_month)(children: CastStringToIntervalYearMonth(col 2:string) -> 5:interval_year_month, CastStringToIntervalYearMonth(col 2:string) -> 8:interval_year_month) -> 9:interval_year_month, IntervalYearMonthScalarSubtractIntervalYearMonthColumn(val 14, col 5:interval_year_month)(children: CastStringToIntervalYearMonth(col 2:string) -> 5:interval_year_month) -> 8:interval_year_month - Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: date) sort order: + @@ -211,7 +211,7 @@ STAGE PLANS: native: false nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false - Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: interval_year_month), _col2 (type: interval_year_month), _col3 (type: interval_year_month), _col4 (type: interval_year_month) Execution mode: vectorized Map Vectorization: @@ -231,10 +231,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: date), INTERVAL'2-4' (type: interval_year_month), VALUE._col0 (type: 
interval_year_month), VALUE._col1 (type: interval_year_month), INTERVAL'0-0' (type: interval_year_month), VALUE._col2 (type: interval_year_month), VALUE._col3 (type: interval_year_month) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 - Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -310,7 +310,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: vector_interval_1 - Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -321,7 +321,7 @@ STAGE PLANS: native: true projectedOutputColumnNums: [1, 7, 6, 9, 8] selectExpressions: IntervalDayTimeColAddIntervalDayTimeColumn(col 5:interval_day_time, col 6:interval_day_time)(children: CastStringToIntervalDayTime(col 3:string) -> 5:interval_day_time, CastStringToIntervalDayTime(col 3:string) -> 6:interval_day_time) -> 7:interval_day_time, IntervalDayTimeScalarAddIntervalDayTimeColumn(val 1 02:03:04.000000000, col 5:interval_day_time)(children: CastStringToIntervalDayTime(col 3:string) -> 5:interval_day_time) -> 6:interval_day_time, IntervalDayTimeColSubtractIntervalDayTimeColumn(col 5:interval_day_time, col 8:interval_day_time)(children: CastStringToIntervalDayTime(col 3:string) -> 5:interval_day_time, CastStringToIntervalDayTime(col 3:string) -> 8:interval_day_time) -> 9:interval_day_time, IntervalDayTimeScalarSubtractIntervalDayTimeColumn(val 1 02:03:04.000000000, col 5:interval_day_time)(children: CastStringToIntervalDayTime(col 3:string) -> 5:interval_day_time) -> 8:interval_day_time - Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: date) sort order: + @@ -330,7 +330,7 @@ STAGE PLANS: native: false nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false - Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: interval_day_time), _col2 (type: interval_day_time), _col3 (type: interval_day_time), _col4 (type: interval_day_time) Execution mode: vectorized Map Vectorization: @@ -350,10 +350,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: date), INTERVAL'2 04:06:08.000000000' (type: interval_day_time), VALUE._col0 (type: interval_day_time), VALUE._col1 (type: interval_day_time), INTERVAL'0 00:00:00.000000000' (type: interval_day_time), VALUE._col2 (type: interval_day_time), VALUE._col3 (type: interval_day_time) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 - Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Statistics: Num 
rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -441,7 +441,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: vector_interval_1 - Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -452,7 +452,7 @@ STAGE PLANS: native: true projectedOutputColumnNums: [1, 5, 7, 6, 9, 8, 11, 12, 14, 15, 16, 17, 18] selectExpressions: DateColAddIntervalYearMonthScalar(col 1:date, val 1-2) -> 5:date, DateColAddIntervalYearMonthColumn(col 1:date, col 6:interval_year_month)(children: CastStringToIntervalYearMonth(col 2:string) -> 6:interval_year_month) -> 7:date, IntervalYearMonthScalarAddDateColumn(val 1-2, col 1:interval_year_month) -> 6:date, IntervalYearMonthColAddDateColumn(col 8:interval_year_month, col 1:date)(children: CastStringToIntervalYearMonth(col 2:string) -> 8:interval_year_month) -> 9:date, DateColSubtractIntervalYearMonthScalar(col 1:date, val 1-2) -> 8:date, DateColSubtractIntervalYearMonthColumn(col 1:date, col 10:interval_year_month)(children: CastStringToIntervalYearMonth(col 2:string) -> 10:interval_year_month) -> 11:date, DateColAddIntervalDayTimeScalar(col 1:date, val 1 02:03:04.000000000) -> 12:timestamp, DateColAddIntervalDayTimeColumn(col 1:date, col 13:interval_day_time)(children: CastStringToIntervalDayTime(col 3:string) -> 13:interval_day_time) -> 14:timestamp, IntervalDayTimeScalarAddDateColumn(val 1 02:03:04.000000000, col 1:date) -> 15:timestamp, IntervalDayTimeColAddDateColumn(col 13:interval_day_time, col 1:date)(children: CastStringToIntervalDayTime(col 3:string) -> 13:interval_day_time) -> 16:timestamp, DateColSubtractIntervalDayTimeScalar(col 1:date, val 1 02:03:04.000000000) -> 17:timestamp, DateColSubtractIntervalDayTimeColumn(col 1:date, col 13:interval_day_time)(children: CastStringToIntervalDayTime(col 3:string) -> 13:interval_day_time) -> 18:timestamp - Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: date) sort order: + @@ -461,7 +461,7 @@ STAGE PLANS: native: false nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false - Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: date), _col2 (type: date), _col3 (type: date), _col4 (type: date), _col5 (type: date), _col6 (type: date), _col7 (type: timestamp), _col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: timestamp), _col11 (type: timestamp), _col12 (type: timestamp) Execution mode: vectorized Map Vectorization: @@ -481,10 +481,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: date), VALUE._col0 (type: date), VALUE._col1 (type: date), 
VALUE._col2 (type: date), VALUE._col3 (type: date), VALUE._col4 (type: date), VALUE._col5 (type: date), VALUE._col6 (type: timestamp), VALUE._col7 (type: timestamp), VALUE._col8 (type: timestamp), VALUE._col9 (type: timestamp), VALUE._col10 (type: timestamp), VALUE._col11 (type: timestamp)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12
-          Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -584,7 +584,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: vector_interval_1
-            Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
             TableScan Vectorization:
                 native: true
             Select Operator
@@ -595,7 +595,7 @@ STAGE PLANS:
                   native: true
                   projectedOutputColumnNums: [0, 5, 7, 8, 9, 10, 11, 12, 14, 15, 16, 17, 18]
                   selectExpressions: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-2) -> 5:timestamp, TimestampColAddIntervalYearMonthColumn(col 0:timestamp, col 6:interval_year_month)(children: CastStringToIntervalYearMonth(col 2:string) -> 6:interval_year_month) -> 7:timestamp, IntervalYearMonthScalarAddTimestampColumn(val 1-2, col 0:interval_year_month) -> 8:timestamp, IntervalYearMonthColAddTimestampColumn(col 6:interval_year_month, col 0:timestamp)(children: CastStringToIntervalYearMonth(col 2:string) -> 6:interval_year_month) -> 9:timestamp, TimestampColSubtractIntervalYearMonthScalar(col 0:timestamp, val 1-2) -> 10:timestamp, TimestampColSubtractIntervalYearMonthColumn(col 0:timestamp, col 6:interval_year_month)(children: CastStringToIntervalYearMonth(col 2:string) -> 6:interval_year_month) -> 11:timestamp, TimestampColAddIntervalDayTimeScalar(col 0:timestamp, val 1 02:03:04.000000000) -> 12:timestamp, TimestampColAddIntervalDayTimeColumn(col 0:timestamp, col 13:interval_day_time)(children: CastStringToIntervalDayTime(col 3:string) -> 13:interval_day_time) -> 14:timestamp, IntervalDayTimeScalarAddTimestampColumn(val 1 02:03:04.000000000, col 0:timestamp) -> 15:timestamp, IntervalDayTimeColAddTimestampColumn(col 13:interval_day_time, col 0:timestamp)(children: CastStringToIntervalDayTime(col 3:string) -> 13:interval_day_time) -> 16:timestamp, TimestampColSubtractIntervalDayTimeScalar(col 0:timestamp, val 1 02:03:04.000000000) -> 17:timestamp, TimestampColSubtractIntervalDayTimeColumn(col 0:timestamp, col 13:interval_day_time)(children: CastStringToIntervalDayTime(col 3:string) -> 13:interval_day_time) -> 18:timestamp
-              Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: timestamp)
                 sort order: +
@@ -604,7 +604,7 @@ STAGE PLANS:
                     native: false
                     nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                     nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
-                Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: timestamp), _col2 (type: timestamp), _col3 (type: timestamp), _col4 (type: timestamp), _col5 (type: timestamp), _col6 (type: timestamp), _col7 (type: timestamp), _col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: timestamp), _col11 (type: timestamp), _col12 (type: timestamp)
       Execution mode: vectorized
       Map Vectorization:
@@ -624,10 +624,10 @@ STAGE PLANS:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: timestamp), VALUE._col0 (type: timestamp), VALUE._col1 (type: timestamp), VALUE._col2 (type: timestamp), VALUE._col3 (type: timestamp), VALUE._col4 (type: timestamp), VALUE._col5 (type: timestamp), VALUE._col6 (type: timestamp), VALUE._col7 (type: timestamp), VALUE._col8 (type: timestamp), VALUE._col9 (type: timestamp), VALUE._col10 (type: timestamp), VALUE._col11 (type: timestamp)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12
-          Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -709,7 +709,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
            alias: vector_interval_1
-            Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
             TableScan Vectorization:
                 native: true
             Select Operator
@@ -720,7 +720,7 @@ STAGE PLANS:
                   native: true
                   projectedOutputColumnNums: [0, 5, 6, 7]
                   selectExpressions: TimestampColSubtractTimestampColumn(col 0:timestamp, col 0:timestamp) -> 5:interval_day_time, TimestampScalarSubtractTimestampColumn(val 2001-01-01 01:02:03.0, col 0:timestamp) -> 6:interval_day_time, TimestampColSubtractTimestampScalar(col 0:timestamp, val 2001-01-01 01:02:03.0) -> 7:interval_day_time
-              Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: timestamp)
                 sort order: +
@@ -729,7 +729,7 @@ STAGE PLANS:
                     native: false
                     nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                     nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
-                Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: interval_day_time), _col2 (type: interval_day_time), _col3 (type: interval_day_time)
       Execution mode: vectorized
       Map Vectorization:
@@ -749,10 +749,10 @@ STAGE PLANS:
         Select Operator
          expressions: KEY.reducesinkkey0 (type: timestamp), VALUE._col0 (type: interval_day_time), VALUE._col1 (type: interval_day_time), VALUE._col2 (type: interval_day_time)
           outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -816,7 +816,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
            alias: vector_interval_1
-            Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
             TableScan Vectorization:
                 native: true
             Select Operator
@@ -827,7 +827,7 @@ STAGE PLANS:
                   native: true
                   projectedOutputColumnNums: [1, 5, 6, 7]
                   selectExpressions: DateColSubtractDateColumn(col 1:date, col 1:date) -> 5:interval_day_time, DateScalarSubtractDateColumn(val 2001-01-01 00:00:00.0, col 1:date) -> 6:interval_day_time, DateColSubtractDateScalar(col 1:date, val 2001-01-01 00:00:00.0) -> 7:interval_day_time
-              Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: date)
                 sort order: +
@@ -836,7 +836,7 @@ STAGE PLANS:
                     native: false
                     nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                     nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
-                Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: interval_day_time), _col2 (type: interval_day_time), _col3 (type: interval_day_time)
       Execution mode: vectorized
       Map Vectorization:
@@ -856,10 +856,10 @@ STAGE PLANS:
         Select Operator
          expressions: KEY.reducesinkkey0 (type: date), VALUE._col0 (type: interval_day_time), VALUE._col1 (type: interval_day_time), VALUE._col2 (type: interval_day_time)
           outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -929,7 +929,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
            alias: vector_interval_1
-            Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
             TableScan Vectorization:
                 native: true
             Select Operator
@@ -940,7 +940,7 @@ STAGE PLANS:
                   native: true
                   projectedOutputColumnNums: [1, 5, 6, 7, 8, 9, 10]
                   selectExpressions: TimestampColSubtractDateColumn(col 0:timestamp, col 1:date) -> 5:interval_day_time, TimestampScalarSubtractDateColumn(val 2001-01-01 01:02:03.0, col 1:date) -> 6:interval_day_time, TimestampColSubtractDateScalar(col 0:timestamp, val 2001-01-01 00:00:00.0) -> 7:interval_day_time, DateColSubtractTimestampColumn(col 1:date, col 0:timestamp) -> 8:interval_day_time, DateColSubtractTimestampScalar(col 1:date, val 2001-01-01 01:02:03.0) -> 9:interval_day_time, DateScalarSubtractTimestampColumn(val 2001-01-01 00:00:00.0, col 0:timestamp) -> 10:interval_day_time
-              Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: date)
                 sort order: +
@@ -949,7 +949,7 @@ STAGE PLANS:
                     native: false
                     nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                     nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
-                Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: interval_day_time), _col2 (type: interval_day_time), _col3 (type: interval_day_time), _col4 (type: interval_day_time), _col5 (type: interval_day_time), _col6 (type: interval_day_time)
       Execution mode: vectorized
       Map Vectorization:
@@ -969,10 +969,10 @@ STAGE PLANS:
         Select Operator
          expressions: KEY.reducesinkkey0 (type: date), VALUE._col0 (type: interval_day_time), VALUE._col1 (type: interval_day_time), VALUE._col2 (type: interval_day_time), VALUE._col3 (type: interval_day_time), VALUE._col4 (type: interval_day_time), VALUE._col5 (type: interval_day_time)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-          Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out b/ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out
index 01e915b..9f90e82 100644
--- a/ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out
+++ b/ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out
@@ -253,7 +253,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
            alias: alltypesorc_string
-            Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE
             TableScan Vectorization:
                 native: true
             Select Operator
@@ -264,7 +264,7 @@ STAGE PLANS:
                   native: true
                   projectedOutputColumnNums: [5, 6, 7, 8, 9, 10, 11, 12, 13, 0, 1, 3, 14, 15, 16, 17, 18]
                   selectExpressions: VectorUDFUnixTimeStampTimestamp(col 1:timestamp) -> 5:bigint, VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 6:int, VectorUDFMonthTimestamp(col 1:timestamp, field MONTH) -> 7:int, VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 8:int, VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 9:int, VectorUDFWeekOfYearTimestamp(col 1:timestamp, field WEEK_OF_YEAR) -> 10:int, VectorUDFHourTimestamp(col 1:timestamp, field HOUR_OF_DAY) -> 11:int, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 12:int, VectorUDFSecondTimestamp(col 1:timestamp, field SECOND) -> 13:int, IfExprTimestampColumnScalar(col 0:boolean, col 1:timestamp, val 1319-02-02 16:31:57.778) -> 14:timestamp, IfExprTimestampScalarColumn(col 0:boolean, val 2000-12-18 08:42:30.0005, col 1:timestamp) -> 15:timestamp, IfExprTimestampColumnColumn(col 0:boolean, col 1:timestamp, col 3:timestamp) -> 16:timestamp, IfExprColumnNull(col 0:boolean, col 1:timestamp, null)(children: col 0:boolean, col 1:timestamp) -> 17:timestamp, IfExprNullColumn(col 0:boolean, null, col 3)(children: col 0:boolean, col 3:timestamp) -> 18:timestamp
-              Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: bigint)
                 sort order: +
@@ -273,7 +273,7 @@ STAGE PLANS:
                     native: false
                     nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                     nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
-                Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col4 (type: int), _col5 (type: int), _col6 (type: int), _col7 (type: int), _col8 (type: int), _col9 (type: boolean), _col10 (type: timestamp), _col11 (type: timestamp), _col12 (type: timestamp), _col13 (type: timestamp), _col14 (type: timestamp), _col15 (type: timestamp), _col16 (type: timestamp)
       Execution mode: vectorized
       Map Vectorization:
@@ -293,10 +293,10 @@ STAGE PLANS:
         Select Operator
          expressions: KEY.reducesinkkey0 (type: bigint), VALUE._col0 (type: int), VALUE._col1 (type: int), VALUE._col2 (type: int), VALUE._col3 (type: int), VALUE._col4 (type: int), VALUE._col5 (type: int), VALUE._col6 (type: int), VALUE._col7 (type: int), VALUE._col8 (type: boolean), VALUE._col9 (type: timestamp), VALUE._col10 (type: timestamp), VALUE._col11 (type: timestamp), VALUE._col12 (type: timestamp), VALUE._col13 (type: timestamp), VALUE._col14 (type: timestamp), VALUE._col15 (type: timestamp)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16
-          Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -446,7 +446,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
            alias: alltypesorc_string
-            Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE
             TableScan Vectorization:
                 native: true
             Select Operator
@@ -457,7 +457,7 @@ STAGE PLANS:
                   native: true
                   projectedOutputColumnNums: [5, 6, 7, 8, 9, 10, 11, 12, 13]
                   selectExpressions: VectorUDFUnixTimeStampString(col 2:string) -> 5:bigint, VectorUDFYearString(col 2:string, fieldStart 0, fieldLength 4) -> 6:int, VectorUDFMonthString(col 2:string, fieldStart 5, fieldLength 2) -> 7:int, VectorUDFDayOfMonthString(col 2:string, fieldStart 8, fieldLength 2) -> 8:int, VectorUDFDayOfMonthString(col 2:string, fieldStart 8, fieldLength 2) -> 9:int, VectorUDFWeekOfYearString(col 2:string) -> 10:int, VectorUDFHourString(col 2:string, fieldStart 11, fieldLength 2) -> 11:int, VectorUDFMinuteString(col 2:string, fieldStart 14, fieldLength 2) -> 12:int, VectorUDFSecondString(col 2:string, fieldStart 17, fieldLength 2) -> 13:int
-              Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: bigint)
                 sort order: +
@@ -466,7 +466,7 @@ STAGE PLANS:
                     native: false
                     nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                     nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
-                Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col4 (type: int), _col5 (type: int), _col6 (type: int), _col7 (type: int), _col8 (type: int)
       Execution mode: vectorized
       Map Vectorization:
@@ -486,10 +486,10 @@ STAGE PLANS:
         Select Operator
          expressions: KEY.reducesinkkey0 (type: bigint), VALUE._col0 (type: int), VALUE._col1 (type: int), VALUE._col2 (type: int), VALUE._col3 (type: int), VALUE._col4 (type: int), VALUE._col5 (type: int), VALUE._col6 (type: int), VALUE._col7 (type: int)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
-          Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -623,7 +623,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
            alias: alltypesorc_string
-            Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE
             TableScan Vectorization:
                 native: true
             Select Operator
@@ -634,7 +634,7 @@ STAGE PLANS:
                   native: true
                   projectedOutputColumnNums: [7, 8, 9, 10, 11, 12, 13, 14, 15]
                   selectExpressions: LongColEqualLongColumn(col 5:bigint, col 6:bigint)(children: VectorUDFUnixTimeStampTimestamp(col 1:timestamp) -> 5:bigint, VectorUDFUnixTimeStampString(col 2:string) -> 6:bigint) -> 7:boolean, LongColEqualLongColumn(col 5:int, col 6:int)(children: VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 5:int, VectorUDFYearString(col 2:string, fieldStart 0, fieldLength 4) -> 6:int) -> 8:boolean, LongColEqualLongColumn(col 5:int, col 6:int)(children: VectorUDFMonthTimestamp(col 1:timestamp, field MONTH) -> 5:int, VectorUDFMonthString(col 2:string, fieldStart 5, fieldLength 2) -> 6:int) -> 9:boolean, LongColEqualLongColumn(col 5:int, col 6:int)(children: VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 5:int, VectorUDFDayOfMonthString(col 2:string, fieldStart 8, fieldLength 2) -> 6:int) -> 10:boolean, LongColEqualLongColumn(col 5:int, col 6:int)(children: VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 5:int, VectorUDFDayOfMonthString(col 2:string, fieldStart 8, fieldLength 2) -> 6:int) -> 11:boolean, LongColEqualLongColumn(col 5:int, col 6:int)(children: VectorUDFWeekOfYearTimestamp(col 1:timestamp, field WEEK_OF_YEAR) -> 5:int, VectorUDFWeekOfYearString(col 2:string) -> 6:int) -> 12:boolean, LongColEqualLongColumn(col 5:int, col 6:int)(children: VectorUDFHourTimestamp(col 1:timestamp, field HOUR_OF_DAY) -> 5:int, VectorUDFHourString(col 2:string, fieldStart 11, fieldLength 2) -> 6:int) -> 13:boolean, LongColEqualLongColumn(col 5:int, col 6:int)(children: VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 5:int, VectorUDFMinuteString(col 2:string, fieldStart 14, fieldLength 2) -> 6:int) -> 14:boolean, LongColEqualLongColumn(col 5:int, col 6:int)(children: VectorUDFSecondTimestamp(col 1:timestamp, field SECOND) -> 5:int, VectorUDFSecondString(col 2:string, fieldStart 17, fieldLength 2) -> 6:int) -> 15:boolean
-              Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: boolean)
                 sort order: +
@@ -643,7 +643,7 @@ STAGE PLANS:
                     native: false
                     nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                     nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
-                Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: boolean), _col2 (type: boolean), _col3 (type: boolean), _col4 (type: boolean), _col5 (type: boolean), _col6 (type: boolean), _col7 (type: boolean), _col8 (type: boolean)
       Execution mode: vectorized
       Map Vectorization:
@@ -663,10 +663,10 @@ STAGE PLANS:
         Select Operator
          expressions: KEY.reducesinkkey0 (type: boolean), VALUE._col0 (type: boolean), VALUE._col1 (type: boolean), VALUE._col2 (type: boolean), VALUE._col3 (type: boolean), VALUE._col4 (type: boolean), VALUE._col5 (type: boolean), VALUE._col6 (type: boolean), VALUE._col7 (type: boolean)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
-          Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -916,7 +916,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
            alias: alltypesorc_string
-            Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE
             TableScan Vectorization:
                 native: true
             Select Operator
@@ -926,7 +926,7 @@ STAGE PLANS:
                   className: VectorSelectOperator
                   native: true
                   projectedOutputColumnNums: [1]
-              Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
                 aggregations: min(ctimestamp1), max(ctimestamp1), count(ctimestamp1), count()
                 Group By Vectorization:
@@ -1023,7 +1023,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
            alias: alltypesorc_string
-            Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE
             TableScan Vectorization:
                 native: true
             Select Operator
@@ -1033,7 +1033,7 @@ STAGE PLANS:
                   className: VectorSelectOperator
                   native: true
                   projectedOutputColumnNums: [1]
-              Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
                 aggregations: sum(ctimestamp1)
                 Group By Vectorization:
@@ -1142,7 +1142,7 @@ STAGE PLANS:
       Map Operator Tree:
          TableScan
            alias: alltypesorc_string
-            Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE
             TableScan Vectorization:
                 native: true
             Select Operator
@@ -1153,7 +1153,7 @@ STAGE PLANS:
                   native: true
                   projectedOutputColumnNums: [1, 5, 8]
                   selectExpressions: CastTimestampToDouble(col 1:timestamp) -> 5:double, DoubleColMultiplyDoubleColumn(col 6:double, col 7:double)(children: CastTimestampToDouble(col 1:timestamp) -> 6:double, CastTimestampToDouble(col 1:timestamp) -> 7:double) -> 8:double
-              Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
                 aggregations: sum(_col0), count(_col0), sum(_col2), sum(_col1)
                 Group By Vectorization:
diff --git a/streaming/src/test/org/apache/hive/streaming/TestStreaming.java b/streaming/src/test/org/apache/hive/streaming/TestStreaming.java
index 042fdbe..1f05d88 100644
--- a/streaming/src/test/org/apache/hive/streaming/TestStreaming.java
+++ b/streaming/src/test/org/apache/hive/streaming/TestStreaming.java
@@ -2077,15 +2077,15 @@ public void testFileDumpDeltaFilesWithStreamingOptimizations() throws Exception
     Assert.assertEquals(true, outDump.contains("Compression: NONE"));
     // no stats/indexes
     Assert.assertEquals(true, outDump.contains("Column 0: count: 0 hasNull: false"));
-    Assert.assertEquals(true, outDump.contains("Column 1: count: 0 hasNull: false sum: 0"));
-    Assert.assertEquals(true, outDump.contains("Column 2: count: 0 hasNull: false sum: 0"));
-    Assert.assertEquals(true, outDump.contains("Column 3: count: 0 hasNull: false sum: 0"));
-    Assert.assertEquals(true, outDump.contains("Column 4: count: 0 hasNull: false sum: 0"));
-    Assert.assertEquals(true, outDump.contains("Column 5: count: 0 hasNull: false sum: 0"));
+    Assert.assertEquals(true, outDump.contains("Column 1: count: 0 hasNull: false bytesOnDisk: 12 sum: 0"));
+    Assert.assertEquals(true, outDump.contains("Column 2: count: 0 hasNull: false bytesOnDisk: 12 sum: 0"));
+    Assert.assertEquals(true, outDump.contains("Column 3: count: 0 hasNull: false bytesOnDisk: 24 sum: 0"));
+    Assert.assertEquals(true, outDump.contains("Column 4: count: 0 hasNull: false bytesOnDisk: 14 sum: 0"));
+    Assert.assertEquals(true, outDump.contains("Column 5: count: 0 hasNull: false bytesOnDisk: 12 sum: 0"));
     Assert.assertEquals(true, outDump.contains("Column 6: count: 0 hasNull: false"));
-    Assert.assertEquals(true, outDump.contains("Column 7: count: 0 hasNull: false"));
-    Assert.assertEquals(true, outDump.contains("Column 8: count: 0 hasNull: false sum: 0"));
-    Assert.assertEquals(true, outDump.contains("Column 9: count: 0 hasNull: false"));
+    Assert.assertEquals(true, outDump.contains("Column 7: count: 0 hasNull: false bytesOnDisk: 11864"));
+    Assert.assertEquals(true, outDump.contains("Column 8: count: 0 hasNull: false bytesOnDisk: 2033 sum: 0"));
+    Assert.assertEquals(true, outDump.contains("Column 9: count: 0 hasNull: false bytesOnDisk: 13629"));
     // no dictionary
     Assert.assertEquals(true, outDump.contains("Encoding column 7: DIRECT_V2"));
     Assert.assertEquals(true, outDump.contains("Encoding column 9: DIRECT_V2"));