diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties index b584c72..1ada664 100644 --- a/itests/src/test/resources/testconfiguration.properties +++ b/itests/src/test/resources/testconfiguration.properties @@ -539,6 +539,9 @@ minillaplocal.query.files=\ llap_partitioned.q,\ llap_smb.q,\ llap_vector_nohybridgrace.q,\ + llap_uncompressed.q,\ + llap_decimal64_reader.q,\ + llap_text.q,\ load_data_acid_rename.q,\ load_data_using_job.q,\ load_dyn_part5.q,\ diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java index 40f7c83..ac1aca8 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hive.llap.io.api.impl; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport; import org.apache.hadoop.hive.ql.io.BatchToRowInputFormat; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; @@ -232,4 +233,9 @@ static TableScanOperator findTsOp(MapWork mapWork) throws HiveException { } return tableScanOperator; } + + @Override + public VectorizedSupport.Support[] getSupportedFeatures() { + return new VectorizedSupport.Support[] {VectorizedSupport.Support.DECIMAL_64}; + } } diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java index 7af1b05..32f3bed 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java @@ -31,13 +31,11 @@ import org.apache.hadoop.hive.llap.counters.QueryFragmentCounters; import org.apache.hadoop.hive.llap.io.api.impl.ColumnVectorBatch; import org.apache.hadoop.hive.llap.io.api.impl.LlapIoImpl; -import org.apache.hadoop.hive.llap.io.decode.ColumnVectorProducer.Includes; import org.apache.hadoop.hive.llap.io.encoded.SerDeEncodedDataReader; import org.apache.hadoop.hive.llap.io.metadata.ConsumerFileMetadata; import org.apache.hadoop.hive.llap.io.metadata.ConsumerStripeMetadata; import org.apache.hadoop.hive.llap.metrics.LlapDaemonCacheMetrics; import org.apache.hadoop.hive.llap.metrics.LlapDaemonIOMetrics; -import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx; import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat; import org.apache.hadoop.hive.ql.io.orc.encoded.Consumer; import org.apache.hadoop.hive.ql.io.orc.encoded.IoTrace; @@ -52,8 +50,8 @@ import org.apache.hadoop.mapred.Reporter; import org.apache.hive.common.util.FixedSizedObjectPool; import org.apache.orc.CompressionKind; +import org.apache.orc.OrcFile; import org.apache.orc.OrcProto; -import org.apache.orc.OrcUtils; import org.apache.orc.OrcProto.ColumnEncoding; import org.apache.orc.OrcProto.RowIndex; import org.apache.orc.OrcProto.RowIndexEntry; @@ -289,5 +287,10 @@ public CompressionKind getCompressionKind() { public TypeDescription getSchema() { return schema; } + + @Override + public OrcFile.Version getFileVersion() { + return null; + } } } diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java 
b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java index feccb87..0d7435c 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java @@ -33,6 +33,7 @@ import org.apache.hadoop.hive.llap.metrics.LlapDaemonIOMetrics; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ListColumnVector; @@ -73,10 +74,11 @@ private IoTrace trace; private final Includes includes; private TypeDescription[] batchSchemas; + private boolean useDecimal64ColumnVectors; public OrcEncodedDataConsumer( - Consumer consumer, Includes includes, boolean skipCorrupt, - QueryFragmentCounters counters, LlapDaemonIOMetrics ioMetrics) { + Consumer consumer, Includes includes, boolean skipCorrupt, + QueryFragmentCounters counters, LlapDaemonIOMetrics ioMetrics) { super(consumer, includes.getPhysicalColumnIds().size(), ioMetrics); this.includes = includes; // TODO: get rid of this @@ -84,6 +86,10 @@ public OrcEncodedDataConsumer( this.counters = counters; } + public void setUseDecimal64ColumnVectors(final boolean useDecimal64ColumnVectors) { + this.useDecimal64ColumnVectors = useDecimal64ColumnVectors; + } + public void setFileMetadata(ConsumerFileMetadata f) { assert fileMetadata == null; fileMetadata = f; @@ -153,7 +159,7 @@ protected void decodeBatch(OrcEncodedColumnBatch batch, if (cvb.cols[idx] == null) { // Orc store rows inside a root struct (hive writes it this way). // When we populate column vectors we skip over the root struct. - cvb.cols[idx] = createColumn(batchSchemas[idx], VectorizedRowBatch.DEFAULT_SIZE); + cvb.cols[idx] = createColumn(batchSchemas[idx], VectorizedRowBatch.DEFAULT_SIZE, useDecimal64ColumnVectors); } trace.logTreeReaderNextVector(idx); @@ -217,10 +223,10 @@ private void createColumnReaders(OrcEncodedColumnBatch batch, TreeReaderFactory.Context context = new TreeReaderFactory.ReaderContext() .setSchemaEvolution(evolution).skipCorrupt(skipCorrupt) .writerTimeZone(stripeMetadata.getWriterTimezone()) - ; + .fileFormat(fileMetadata == null ? 
null : fileMetadata.getFileVersion()); this.batchSchemas = includes.getBatchReaderTypes(fileSchema); StructTreeReader treeReader = EncodedTreeReaderFactory.createRootTreeReader( - batchSchemas, stripeMetadata.getEncodings(), batch, codec, context); + batchSchemas, stripeMetadata.getEncodings(), batch, codec, context, useDecimal64ColumnVectors); this.columnReaders = treeReader.getChildReaders(); if (LlapIoImpl.LOG.isDebugEnabled()) { @@ -232,7 +238,7 @@ private void createColumnReaders(OrcEncodedColumnBatch batch, positionInStreams(columnReaders, batch.getBatchKey(), stripeMetadata); } - private ColumnVector createColumn(TypeDescription type, int batchSize) { + private ColumnVector createColumn(TypeDescription type, int batchSize, final boolean useDecimal64ColumnVectors) { switch (type.getCategory()) { case BOOLEAN: case BYTE: @@ -252,30 +258,34 @@ private ColumnVector createColumn(TypeDescription type, int batchSize) { case TIMESTAMP: return new TimestampColumnVector(batchSize); case DECIMAL: - return new DecimalColumnVector(batchSize, type.getPrecision(), - type.getScale()); + if (useDecimal64ColumnVectors && type.getPrecision() <= TypeDescription.MAX_DECIMAL64_PRECISION) { + return new Decimal64ColumnVector(batchSize, type.getPrecision(), type.getScale()); + } else { + return new DecimalColumnVector(batchSize, type.getPrecision(), type.getScale()); + } case STRUCT: { List subtypeIdxs = type.getChildren(); ColumnVector[] fieldVector = new ColumnVector[subtypeIdxs.size()]; - for(int i = 0; i < fieldVector.length; ++i) { - fieldVector[i] = createColumn(subtypeIdxs.get(i), batchSize); + for (int i = 0; i < fieldVector.length; ++i) { + fieldVector[i] = createColumn(subtypeIdxs.get(i), batchSize, useDecimal64ColumnVectors); } return new StructColumnVector(batchSize, fieldVector); } case UNION: { List subtypeIdxs = type.getChildren(); ColumnVector[] fieldVector = new ColumnVector[subtypeIdxs.size()]; - for(int i=0; i < fieldVector.length; ++i) { - fieldVector[i] = createColumn(subtypeIdxs.get(i), batchSize); + for (int i = 0; i < fieldVector.length; ++i) { + fieldVector[i] = createColumn(subtypeIdxs.get(i), batchSize, useDecimal64ColumnVectors); } return new UnionColumnVector(batchSize, fieldVector); } case LIST: - return new ListColumnVector(batchSize, createColumn(type.getChildren().get(0), batchSize)); + return new ListColumnVector(batchSize, createColumn(type.getChildren().get(0), batchSize, + useDecimal64ColumnVectors)); case MAP: List subtypeIdxs = type.getChildren(); - return new MapColumnVector(batchSize, createColumn(subtypeIdxs.get(0), batchSize), - createColumn(subtypeIdxs.get(1), batchSize)); + return new MapColumnVector(batchSize, createColumn(subtypeIdxs.get(0), batchSize, useDecimal64ColumnVectors), + createColumn(subtypeIdxs.get(1), batchSize, useDecimal64ColumnVectors)); default: throw new IllegalArgumentException("LLAP does not support " + type.getCategory()); } diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java index 2947c16..b76b0de 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java @@ -24,37 +24,17 @@ import java.util.Arrays; import java.util.List; -import org.apache.hadoop.hive.llap.counters.LlapIOCounters; -import org.apache.orc.CompressionCodec; -import org.apache.orc.OrcProto.BloomFilterIndex; 
-import org.apache.orc.OrcProto.FileTail; -import org.apache.orc.OrcProto.RowIndex; -import org.apache.orc.OrcProto.Stream; -import org.apache.orc.OrcProto.StripeStatistics; -import org.apache.orc.TypeDescription; -import org.apache.orc.impl.BufferChunk; -import org.apache.orc.impl.DataReaderProperties; -import org.apache.orc.impl.InStream; -import org.apache.orc.impl.OrcCodecPool; -import org.apache.orc.impl.OrcIndex; -import org.apache.orc.impl.OrcTail; -import org.apache.orc.impl.ReaderImpl; -import org.apache.orc.impl.SchemaEvolution; -import org.apache.orc.impl.WriterImpl; -import org.apache.tez.common.counters.TezCounters; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.Pool; import org.apache.hadoop.hive.common.Pool.PoolObjectHelper; +import org.apache.hadoop.hive.common.io.Allocator; import org.apache.hadoop.hive.common.io.Allocator.BufferObjectFactory; import org.apache.hadoop.hive.common.io.DataCache; -import org.apache.hadoop.hive.common.io.Allocator; -import org.apache.hadoop.hive.common.io.encoded.EncodedColumnBatch.ColumnStreamData; import org.apache.hadoop.hive.common.io.DiskRange; import org.apache.hadoop.hive.common.io.DiskRangeList; +import org.apache.hadoop.hive.common.io.encoded.EncodedColumnBatch.ColumnStreamData; import org.apache.hadoop.hive.common.io.encoded.MemoryBuffer; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; @@ -65,38 +45,58 @@ import org.apache.hadoop.hive.llap.cache.LlapDataBuffer; import org.apache.hadoop.hive.llap.cache.LowLevelCache; import org.apache.hadoop.hive.llap.cache.LowLevelCache.Priority; +import org.apache.hadoop.hive.llap.counters.LlapIOCounters; import org.apache.hadoop.hive.llap.counters.QueryFragmentCounters; import org.apache.hadoop.hive.llap.io.api.impl.LlapIoImpl; import org.apache.hadoop.hive.llap.io.decode.ColumnVectorProducer.Includes; import org.apache.hadoop.hive.llap.io.decode.ColumnVectorProducer.SchemaEvolutionFactory; import org.apache.hadoop.hive.llap.io.decode.OrcEncodedDataConsumer; -import org.apache.hadoop.hive.llap.io.metadata.OrcFileMetadata; import org.apache.hadoop.hive.llap.io.metadata.MetadataCache; import org.apache.hadoop.hive.llap.io.metadata.MetadataCache.LlapBufferOrBuffers; +import org.apache.hadoop.hive.llap.io.metadata.OrcFileMetadata; import org.apache.hadoop.hive.llap.io.metadata.OrcStripeMetadata; import org.apache.hadoop.hive.ql.io.HdfsUtils; -import org.apache.orc.CompressionKind; -import org.apache.orc.DataReader; import org.apache.hadoop.hive.ql.io.orc.OrcFile; import org.apache.hadoop.hive.ql.io.orc.OrcFile.ReaderOptions; -import org.apache.orc.OrcConf; import org.apache.hadoop.hive.ql.io.orc.OrcSplit; -import org.apache.hadoop.hive.ql.io.orc.encoded.Reader; import org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl; import org.apache.hadoop.hive.ql.io.orc.encoded.EncodedOrcFile; import org.apache.hadoop.hive.ql.io.orc.encoded.EncodedReader; import org.apache.hadoop.hive.ql.io.orc.encoded.IoTrace; import org.apache.hadoop.hive.ql.io.orc.encoded.OrcBatchKey; +import org.apache.hadoop.hive.ql.io.orc.encoded.Reader; import org.apache.hadoop.hive.ql.io.orc.encoded.Reader.OrcEncodedColumnBatch; import org.apache.hadoop.hive.ql.io.orc.encoded.Reader.PoolFactory; -import org.apache.orc.impl.RecordReaderUtils; -import org.apache.orc.StripeInformation; import 
org.apache.hadoop.hive.ql.io.sarg.SearchArgument; import org.apache.hadoop.mapred.FileSplit; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hive.common.util.FixedSizedObjectPool; +import org.apache.orc.CompressionCodec; +import org.apache.orc.CompressionKind; +import org.apache.orc.DataReader; +import org.apache.orc.OrcConf; import org.apache.orc.OrcProto; +import org.apache.orc.OrcProto.BloomFilterIndex; +import org.apache.orc.OrcProto.FileTail; +import org.apache.orc.OrcProto.RowIndex; +import org.apache.orc.OrcProto.Stream; +import org.apache.orc.OrcProto.StripeStatistics; +import org.apache.orc.StripeInformation; +import org.apache.orc.TypeDescription; +import org.apache.orc.impl.BufferChunk; +import org.apache.orc.impl.DataReaderProperties; +import org.apache.orc.impl.InStream; +import org.apache.orc.impl.OrcCodecPool; +import org.apache.orc.impl.OrcIndex; +import org.apache.orc.impl.OrcTail; +import org.apache.orc.impl.ReaderImpl; +import org.apache.orc.impl.RecordReaderUtils; +import org.apache.orc.impl.SchemaEvolution; +import org.apache.orc.impl.WriterImpl; import org.apache.tez.common.CallableWithNdc; +import org.apache.tez.common.counters.TezCounters; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.common.collect.Lists; @@ -231,6 +231,8 @@ public OrcEncodedDataReader(LowLevelCache lowLevelCache, BufferUsageManager buff this.jobConf = jobConf; // TODO: setFileMetadata could just create schema. Called in two places; clean up later. this.evolution = sef.createSchemaEvolution(fileMetadata.getSchema()); + consumer.setUseDecimal64ColumnVectors(HiveConf.getVar(jobConf, + ConfVars.HIVE_VECTORIZED_INPUT_FORMAT_SUPPORTS_ENABLED).equalsIgnoreCase("decimal_64")); consumer.setFileMetadata(fileMetadata); consumer.setSchemaEvolution(evolution); } @@ -569,7 +571,8 @@ private OrcFileMetadata getFileFooterFromCacheOrDisk() throws IOException { stripes.add(new ReaderImpl.StripeInformationImpl(stripeProto)); } return new OrcFileMetadata( - fileKey, tail.getFooter(), tail.getPostscript(), stats, stripes); + fileKey, tail.getFooter(), tail.getPostscript(), stats, stripes, + ReaderImpl.getFileVersion(tail.getPostscript().getVersionList())); } finally { // We don't need the buffer anymore. 
metadataCache.decRefBuffer(tailBuffers); @@ -586,7 +589,7 @@ private OrcFileMetadata getFileFooterFromCacheOrDisk() throws IOException { } FileTail ft = orcReader.getFileTail(); return new OrcFileMetadata(fileKey, ft.getFooter(), ft.getPostscript(), - orcReader.getOrcProtoStripeStatistics(), orcReader.getStripes()); + orcReader.getOrcProtoStripeStatistics(), orcReader.getStripes(), orcReader.getFileVersion()); } private OrcProto.StripeFooter buildStripeFooter( diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/SerDeEncodedDataReader.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/SerDeEncodedDataReader.java index bed5887..5b54af5 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/SerDeEncodedDataReader.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/SerDeEncodedDataReader.java @@ -221,6 +221,9 @@ public MemoryBuffer create() { this.sourceSerDe = sourceSerDe; this.reporter = reporter; this.jobConf = jobConf; + final boolean useDecimal64ColumnVectors = HiveConf.getVar(jobConf, ConfVars + .HIVE_VECTORIZED_INPUT_FORMAT_SUPPORTS_ENABLED).equalsIgnoreCase("decimal_64"); + consumer.setUseDecimal64ColumnVectors(useDecimal64ColumnVectors); this.schema = schema; this.writerIncludes = OrcInputFormat.genIncludedColumns(schema, columnIds); SchemaEvolution evolution = new SchemaEvolution(schema, null, diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/VectorDeserializeOrcWriter.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/VectorDeserializeOrcWriter.java index de19b1d..ca6d696 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/VectorDeserializeOrcWriter.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/VectorDeserializeOrcWriter.java @@ -20,14 +20,19 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Properties; +import java.util.Set; import java.util.concurrent.ConcurrentLinkedQueue; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.llap.DebugUtils; @@ -35,10 +40,12 @@ import org.apache.hadoop.hive.llap.io.encoded.SerDeEncodedDataReader.CacheWriter; import org.apache.hadoop.hive.llap.io.encoded.SerDeEncodedDataReader.DeserializerOrcWriter; import org.apache.hadoop.hive.llap.io.encoded.SerDeEncodedDataReader.EncodingWriter; +import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorDeserializeRow; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport; import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils; import org.apache.hadoop.hive.ql.io.orc.Writer; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -46,14 +53,20 @@ import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.Deserializer; import org.apache.hadoop.hive.serde2.SerDeException; +import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; import 
org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters; import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe; import org.apache.hadoop.hive.serde2.lazy.fast.LazySimpleDeserializeRead; import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; import org.apache.hadoop.hive.serde2.objectinspector.StructField; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; +import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.io.BinaryComparable; import org.apache.hadoop.io.Writable; import org.apache.hadoop.mapred.InputFormat; @@ -113,7 +126,7 @@ public static EncodingWriter create(InputFormat sourceIf, Deserializer ser } LlapIoImpl.LOG.info("Creating VertorDeserializeOrcWriter for " + path); return new VectorDeserializeOrcWriter( - daemonConf, tblProps, sourceOi, sourceIncludes, cacheIncludes, allocSize); + jobConf, tblProps, sourceOi, sourceIncludes, cacheIncludes, allocSize); } private VectorDeserializeOrcWriter(Configuration conf, Properties tblProps, @@ -121,12 +134,12 @@ private VectorDeserializeOrcWriter(Configuration conf, Properties tblProps, int allocSize) throws IOException { super(sourceOi, allocSize); // See also: the usage of VectorDeserializeType, for binary. For now, we only want text. - this.vrbCtx = createVrbCtx(sourceOi); + this.vrbCtx = createVrbCtx(sourceOi, tblProps, conf); this.sourceIncludes = sourceIncludes; this.cacheIncludes = cacheIncludes; this.sourceBatch = vrbCtx.createVectorizedRowBatch(); deserializeRead = new LazySimpleDeserializeRead(vrbCtx.getRowColumnTypeInfos(), - /* useExternalBuffer */ true, createSerdeParams(conf, tblProps)); + vrbCtx.getRowdataTypePhysicalVariations(),/* useExternalBuffer */ true, createSerdeParams(conf, tblProps)); vectorDeserializeRow = new VectorDeserializeRow(deserializeRead); int colCount = vrbCtx.getRowColumnTypeInfos().length; boolean[] includes = null; @@ -192,13 +205,41 @@ public void startAsync(AsyncCallback callback) { this.orcThread.start(); } - private static VectorizedRowBatchCtx createVrbCtx(StructObjectInspector oi) throws IOException { + private static VectorizedRowBatchCtx createVrbCtx(StructObjectInspector oi, final Properties tblProps, + final Configuration conf) throws IOException { + final boolean useDecimal64ColumnVectors = HiveConf.getVar(conf, ConfVars + .HIVE_VECTORIZED_INPUT_FORMAT_SUPPORTS_ENABLED).equalsIgnoreCase("decimal_64"); + final String serde = tblProps.getProperty(serdeConstants.SERIALIZATION_LIB); + final String inputFormat = tblProps.getProperty(hive_metastoreConstants.FILE_INPUT_FORMAT); + final boolean isTextFormat = inputFormat != null && inputFormat.equals(TextInputFormat.class.getName()) && + serde != null && serde.equals(LazySimpleSerDe.class.getName()); + List dataTypePhysicalVariations = new ArrayList<>(); + if (isTextFormat) { + StructTypeInfo structTypeInfo = (StructTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(oi); + int dataColumnCount = structTypeInfo.getAllStructFieldTypeInfos().size(); + for (int i = 0; i < dataColumnCount; i++) { + DataTypePhysicalVariation dataTypePhysicalVariation = 
DataTypePhysicalVariation.NONE; + if (useDecimal64ColumnVectors) { + TypeInfo typeInfo = structTypeInfo.getAllStructFieldTypeInfos().get(i); + if (typeInfo instanceof DecimalTypeInfo) { + DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo; + if (HiveDecimalWritable.isPrecisionDecimal64(decimalTypeInfo.precision())) { + dataTypePhysicalVariation = DataTypePhysicalVariation.DECIMAL_64; + } + } + } + dataTypePhysicalVariations.add(dataTypePhysicalVariation); + } + } VectorizedRowBatchCtx vrbCtx = new VectorizedRowBatchCtx(); try { vrbCtx.init(oi, new String[0]); } catch (HiveException e) { throw new IOException(e); } + if (!dataTypePhysicalVariations.isEmpty()) { + vrbCtx.setRowDataTypePhysicalVariations(dataTypePhysicalVariations.toArray(new DataTypePhysicalVariation[0])); + } return vrbCtx; } diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerFileMetadata.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerFileMetadata.java index 89ad4aa..d6b16ef 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerFileMetadata.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerFileMetadata.java @@ -21,6 +21,7 @@ import org.apache.orc.CompressionKind; import org.apache.orc.FileFormatException; +import org.apache.orc.OrcFile; import org.apache.orc.OrcProto.Type; import org.apache.orc.TypeDescription; @@ -29,4 +30,5 @@ CompressionKind getCompressionKind(); List getTypes(); TypeDescription getSchema() throws FileFormatException; + OrcFile.Version getFileVersion(); } diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java index 5cd6f9f..5eb713c 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java @@ -50,9 +50,10 @@ private final long contentLength; private final long numberOfRows; private final boolean isOriginalFormat; + private final OrcFile.Version fileVersion; public OrcFileMetadata(Object fileKey, OrcProto.Footer footer, OrcProto.PostScript ps, - List stats, List stripes) { + List stats, List stripes, final OrcFile.Version fileVersion) { this.stripeStats = stats; this.compressionKind = CompressionKind.valueOf(ps.getCompression().name()); this.compressionBufferSize = (int)ps.getCompressionBlockSize(); @@ -67,6 +68,7 @@ public OrcFileMetadata(Object fileKey, OrcProto.Footer footer, OrcProto.PostScri this.numberOfRows = footer.getNumberOfRows(); this.fileStats = footer.getStatisticsList(); this.fileKey = fileKey; + this.fileVersion = fileVersion; } // FileMetadata @@ -163,4 +165,9 @@ public int getStripeCount() { public TypeDescription getSchema() throws FileFormatException { return OrcUtils.convertTypeFromProtobuf(this.types, 0); } + + @Override + public OrcFile.Version getFileVersion() { + return fileVersion; + } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java index 2246901..183fae5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java @@ -214,8 +214,8 @@ public void setWork(FetchWork work) { private static final Map inputFormats = new HashMap(); @SuppressWarnings("unchecked") - static InputFormat getInputFormatFromCache( - Class inputFormatClass, JobConf 
conf) throws IOException { + public static InputFormat getInputFormatFromCache( + Class inputFormatClass, Configuration conf) throws IOException { if (Configurable.class.isAssignableFrom(inputFormatClass) || JobConfigurable.class.isAssignableFrom(inputFormatClass)) { return ReflectionUtil.newInstance(inputFormatClass, conf); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java index 9ddb136..84a0a3a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java @@ -584,8 +584,8 @@ private int allocateOutputColumnInternal(String columnType, DataTypePhysicalVari // Re-use an existing, available column of the same required type. if (usedOutputColumns.contains(i) || - !(scratchVectorTypeNames)[i].equalsIgnoreCase(columnType) && - scratchDataTypePhysicalVariations[i] == dataTypePhysicalVariation) { + !(scratchVectorTypeNames[i].equalsIgnoreCase(columnType) && + scratchDataTypePhysicalVariations[i] == dataTypePhysicalVariation)) { continue; } //Use i @@ -874,6 +874,7 @@ public VectorExpression getVectorExpression(ExprNodeDesc exprDesc, VectorExpress LOG.debug("Input Expression = " + exprDesc.toString() + ", Vectorized Expression = " + ve.toString()); } + return ve; } @@ -1965,7 +1966,7 @@ public static String getStackTraceAsSingleLine(Throwable e) { return cleaned; } - private VectorExpression instantiateExpression(Class vclass, TypeInfo returnTypeInfo, + public VectorExpression instantiateExpression(Class vclass, TypeInfo returnTypeInfo, DataTypePhysicalVariation returnDataTypePhysicalVariation, Object...args) throws HiveException { VectorExpression ve = null; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedInputFormatInterface.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedInputFormatInterface.java index e74b185..8ee59e4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedInputFormatInterface.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedInputFormatInterface.java @@ -24,4 +24,5 @@ */ public interface VectorizedInputFormatInterface { + VectorizedSupport.Support[] getSupportedFeatures(); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java index 6588385..ffbfb6f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java @@ -163,6 +163,11 @@ public VectorizedRowBatchCtx( return rowDataTypePhysicalVariations; } + public void setRowDataTypePhysicalVariations( + final DataTypePhysicalVariation[] rowDataTypePhysicalVariations) { + this.rowDataTypePhysicalVariations = rowDataTypePhysicalVariations; + } + public int[] getDataColumnNums() { return dataColumnNums; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorInBloomFilterColDynamicValue.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorInBloomFilterColDynamicValue.java index d8a3cac..8bf990a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorInBloomFilterColDynamicValue.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorInBloomFilterColDynamicValue.java @@ -82,6 +82,7 @@ public void init(Configuration 
conf) { // Instantiate BloomFilterCheck based on input column type switch (colVectorType) { case LONG: + case DECIMAL_64: bfCheck = new LongBloomFilterCheck(); break; case DOUBLE: diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFBloomFilter.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFBloomFilter.java index 18bacc5..fdb067f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFBloomFilter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFBloomFilter.java @@ -107,6 +107,7 @@ private void init() { } switch (colVectorType) { case LONG: + case DECIMAL_64: valueProcessor = new ValueProcessorLong(); break; case DOUBLE: diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/BatchToRowReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/BatchToRowReader.java index 2b005c4..c88ee99 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/BatchToRowReader.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/BatchToRowReader.java @@ -24,6 +24,7 @@ import java.util.Arrays; +import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -416,7 +417,12 @@ public static HiveDecimalWritable nextDecimal(ColumnVector vector, } else { result = (HiveDecimalWritable) previous; } - result.set(((DecimalColumnVector) vector).vector[row]); + if (vector instanceof Decimal64ColumnVector) { + long value = ((Decimal64ColumnVector) vector).vector[row]; + result.deserialize64(value, ((Decimal64ColumnVector) vector).scale); + } else { + result.set(((DecimalColumnVector) vector).vector[row]); + } return result; } else { return null; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java index e632d43..6434414 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java @@ -29,6 +29,7 @@ import java.io.IOException; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.io.NullWritable; @@ -50,6 +51,11 @@ static final int MAX_ROW = 100; // to prevent infinite loop static final Logger LOG = LoggerFactory.getLogger(NullRowsRecordReader.class.getName()); + @Override + public VectorizedSupport.Support[] getSupportedFeatures() { + return null; + } + public static class DummyInputSplit extends FileSplit { @SuppressWarnings("unused") // Serialization ctor. 
private DummyInputSplit() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java index 31338d7..9551a2a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java @@ -60,6 +60,7 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizedInputFormatInterface; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport; import org.apache.hadoop.hive.ql.io.AcidInputFormat; import org.apache.hadoop.hive.ql.io.AcidOutputFormat; import org.apache.hadoop.hive.ql.io.AcidUtils; @@ -161,6 +162,11 @@ SelfDescribingInputFormatInterface, AcidInputFormat, CombineHiveInputFormat.AvoidSplitCombination, BatchToRowInputFormat { + @Override + public VectorizedSupport.Support[] getSupportedFeatures() { + return new VectorizedSupport.Support[] {VectorizedSupport.Support.DECIMAL_64}; + } + static enum SplitStrategyKind { HYBRID, BI, @@ -328,7 +334,7 @@ public static RecordReader createReaderFromFile(Reader file, List types = OrcUtils.getOrcTypes(schema); options.include(genIncludedColumns(schema, conf)); setSearchArgument(options, types, conf, isOriginal); - return file.rowsOptions(options); + return file.rowsOptions(options, conf); } public static boolean isOriginal(Reader file) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java index 8c7c72e..d81921c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java @@ -23,6 +23,7 @@ import java.util.TreeMap; import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; import org.apache.hadoop.hive.ql.exec.AbstractFileMergeOperator; import org.apache.hadoop.hive.ql.exec.Utilities; @@ -231,16 +232,17 @@ public String toString() { * @param maxKey only return keys less than or equal to maxKey if it is * non-null * @param options options to provide to read the rows. + * @param conf the configuration, used when creating the underlying record reader * @throws IOException */ @VisibleForTesting ReaderPairAcid(ReaderKey key, Reader reader, - RecordIdentifier minKey, RecordIdentifier maxKey, - ReaderImpl.Options options) throws IOException { + RecordIdentifier minKey, RecordIdentifier maxKey, + ReaderImpl.Options options, final Configuration conf) throws IOException { this.reader = reader; this.key = key; // TODO use stripe statistics to jump over stripes - recordReader = reader.rowsOptions(options); + recordReader = reader.rowsOptions(options, conf); this.minKey = minKey; this.maxKey = maxKey; // advance the reader until we reach the minimum key @@ -437,7 +439,7 @@ static int encodeBucketId(Configuration conf, int bucketId, int statementId) { RecordIdentifier newMinKey = minKey; RecordIdentifier newMaxKey = maxKey; - recordReader = reader.rowsOptions(options); + recordReader = reader.rowsOptions(options, conf); /** * Logically each bucket consists of 0000_0, 0000_0_copy_1... 0000_0_copy_N. etc We don't * know N a priori so if this is true, then the current split is from 0000_0_copy_N file.
@@ -586,7 +588,7 @@ public void next(OrcStruct next) throws IOException { throw new IllegalStateException("No 'original' files found for bucketId=" + this.bucketId + " in " + mergerOptions.getRootPath()); } - recordReader = getReader().rowsOptions(options); + recordReader = getReader().rowsOptions(options, conf); next(nextRecord());//load 1st row } @Override public RecordReader getRecordReader() { @@ -620,7 +622,7 @@ public void next(OrcStruct next) throws IOException { nextRecord = null; return; } - recordReader = reader.rowsOptions(options); + recordReader = reader.rowsOptions(options, conf); } } } @@ -1040,7 +1042,7 @@ public Options clone() { //required (on Tez) that base_x/ doesn't have a file for 'bucket' reader = OrcFile.createReader(bucketPath, OrcFile.readerOptions(conf)); pair = new ReaderPairAcid(key, reader, keyInterval.getMinKey(), keyInterval.getMaxKey(), - eventOptions); + eventOptions, conf); } else { pair = new EmptyReaderPair(); @@ -1050,7 +1052,7 @@ public Options clone() { else { assert reader != null : "no reader? " + mergerOptions.getRootPath(); pair = new ReaderPairAcid(key, reader, keyInterval.getMinKey(), keyInterval.getMaxKey(), - eventOptions); + eventOptions, conf); } } minKey = pair.getMinKey(); @@ -1107,7 +1109,7 @@ public Options clone() { //HIVE-17320: we should compute a SARG to push down min/max key to delete_delta Reader deltaReader = OrcFile.createReader(deltaFile, OrcFile.readerOptions(conf)); ReaderPair deltaPair = new ReaderPairAcid(key, deltaReader, minKey, maxKey, - deltaEventOptions); + deltaEventOptions, conf); if (deltaPair.nextRecord() != null) { readers.put(key, deltaPair); } @@ -1121,7 +1123,7 @@ public Options clone() { assert length >= 0; Reader deltaReader = OrcFile.createReader(deltaFile, OrcFile.readerOptions(conf).maxLength(length)); //must get statementId from file name since Acid 1.0 doesn't write it into bucketProperty - ReaderPairAcid deltaPair = new ReaderPairAcid(key, deltaReader, minKey, maxKey, deltaEventOptions); + ReaderPairAcid deltaPair = new ReaderPairAcid(key, deltaReader, minKey, maxKey, deltaEventOptions, conf); if (deltaPair.nextRecord() != null) { readers.put(key, deltaPair); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/Reader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/Reader.java index 7485e60..8fd9b90 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/Reader.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/Reader.java @@ -20,6 +20,7 @@ import java.io.IOException; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.io.sarg.SearchArgument; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; @@ -55,7 +56,16 @@ * @throws IOException */ RecordReader rowsOptions(Options options) throws IOException; - + /** + * Create a RecordReader that reads everything with the given options. + * @param options the options to use + * @param conf the configuration, used to choose the type of column vectors produced + * @return a new RecordReader + * @throws IOException */ + RecordReader rowsOptions(Options options, Configuration conf) throws IOException; + /** * Create a RecordReader that will scan the entire file. * This is a legacy method and rowsOptions is preferred.
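For context, a minimal caller-side sketch of the new overload (illustrative only, not part of the patch; path and included are assumed inputs): when the configuration sets hive.vectorized.input.format.supports.enabled to decimal_64, the RecordReaderImpl built below allocates its batch via createRowBatchV2(), so eligible decimal columns are read as Decimal64ColumnVector.

    Configuration conf = new Configuration();
    conf.set(HiveConf.ConfVars.HIVE_VECTORIZED_INPUT_FORMAT_SUPPORTS_ENABLED.varname, "decimal_64");
    Reader reader = OrcFile.createReader(path, OrcFile.readerOptions(conf));
    // Without the conf argument the reader falls back to regular DecimalColumnVector batches.
    RecordReader rows = reader.rowsOptions(new org.apache.orc.Reader.Options(conf).include(included), conf);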
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java index 1a6db1f..171b02b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java @@ -21,6 +21,7 @@ import java.io.IOException; import java.nio.ByteBuffer; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.io.sarg.SearchArgument; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; @@ -73,11 +74,17 @@ public RecordReader rows() throws IOException { @Override public RecordReader rowsOptions(Options options) throws IOException { + return rowsOptions(options, null); + } + + @Override + public RecordReader rowsOptions(Options options, Configuration conf) throws IOException { LOG.info("Reading ORC rows from " + path + " with " + options); - return new RecordReaderImpl(this, options); + return new RecordReaderImpl(this, options, conf); } + @Override public RecordReader rows(boolean[] include) throws IOException { return rowsOptions(new Options().include(include)); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java index 5b001a0..c6fe4fc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java @@ -23,8 +23,11 @@ import java.util.List; import java.util.Map; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ListColumnVector; @@ -48,6 +51,7 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; +import org.apache.orc.OrcFile; import org.apache.orc.TypeDescription; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -60,9 +64,15 @@ private long baseRow; protected RecordReaderImpl(ReaderImpl fileReader, - Reader.Options options) throws IOException { + Reader.Options options, final Configuration conf) throws IOException { super(fileReader, options); - batch = this.schema.createRowBatch(); + final boolean useDecimal64ColumnVectors = conf != null && HiveConf.getVar(conf, + HiveConf.ConfVars.HIVE_VECTORIZED_INPUT_FORMAT_SUPPORTS_ENABLED).equalsIgnoreCase("decimal_64"); + if (useDecimal64ColumnVectors) { + batch = this.schema.createRowBatchV2(); + } else { + batch = this.schema.createRowBatch(); + } rowInBatch = 0; } @@ -80,8 +90,8 @@ boolean ensureBatch() throws IOException { return true; } - public VectorizedRowBatch createRowBatch() { - return this.schema.createRowBatch(); + public VectorizedRowBatch createRowBatch(boolean useDecimal64) { + return useDecimal64 ?
this.schema.createRowBatchV2() : this.schema.createRowBatch(); } @Override @@ -393,7 +403,12 @@ static HiveDecimalWritable nextDecimal(ColumnVector vector, } else { result = (HiveDecimalWritable) previous; } - result.set(((DecimalColumnVector) vector).vector[row]); + if (vector instanceof Decimal64ColumnVector) { + long value = ((Decimal64ColumnVector) vector).vector[row]; + result.deserialize64(value, ((Decimal64ColumnVector) vector).scale); + } else { + result.set(((DecimalColumnVector) vector).vector[row]); + } return result; } else { return null; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcAcidRowBatchReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcAcidRowBatchReader.java index a4568de..da27f3d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcAcidRowBatchReader.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcAcidRowBatchReader.java @@ -110,7 +110,7 @@ final Reader reader = OrcInputFormat.createOrcReaderForSplit(conf, (OrcSplit) inputSplit); // Careful with the range here now, we do not want to read the whole base file like deltas. - innerReader = reader.rowsOptions(readerOptions.range(offset, length)); + innerReader = reader.rowsOptions(readerOptions.range(offset, length), conf); baseReader = new org.apache.hadoop.mapred.RecordReader() { @Override @@ -143,7 +143,13 @@ public float getProgress() throws IOException { return innerReader.getProgress(); } }; - this.vectorizedRowBatchBase = ((RecordReaderImpl) innerReader).createRowBatch(); + final boolean useDecimal64ColumnVectors = HiveConf + .getVar(conf, ConfVars.HIVE_VECTORIZED_INPUT_FORMAT_SUPPORTS_ENABLED).equalsIgnoreCase("decimal_64"); + if (useDecimal64ColumnVectors) { + this.vectorizedRowBatchBase = ((RecordReaderImpl) innerReader).createRowBatch(true); + } else { + this.vectorizedRowBatchBase = ((RecordReaderImpl) innerReader).createRowBatch(false); + } } /** @@ -861,10 +867,16 @@ public String toString() { private final boolean isBucketedTable; DeleteReaderValue(Reader deleteDeltaReader, Reader.Options readerOptions, int bucket, - ValidWriteIdList validWriteIdList, boolean isBucketedTable) throws IOException { - this.recordReader = deleteDeltaReader.rowsOptions(readerOptions); + ValidWriteIdList validWriteIdList, boolean isBucketedTable, final JobConf conf) throws IOException { + this.recordReader = deleteDeltaReader.rowsOptions(readerOptions, conf); this.bucketForSplit = bucket; - this.batch = deleteDeltaReader.getSchema().createRowBatch(); + final boolean useDecimal64ColumnVector = HiveConf.getVar(conf, ConfVars + .HIVE_VECTORIZED_INPUT_FORMAT_SUPPORTS_ENABLED).equalsIgnoreCase("decimal_64"); + if (useDecimal64ColumnVector) { + this.batch = deleteDeltaReader.getSchema().createRowBatchV2(); + } else { + this.batch = deleteDeltaReader.getSchema().createRowBatch(); + } if (!recordReader.nextBatch(batch)) { // Read the first batch. this.batch = null; // Oh! the first batch itself was null. Close the reader. 
} @@ -1056,7 +1068,7 @@ public int compareTo(CompressedOwid other) { throw new DeleteEventsOverflowMemoryException(); } DeleteReaderValue deleteReaderValue = new DeleteReaderValue(deleteDeltaReader, - readerOptions, bucket, validWriteIdList, isBucketedTable); + readerOptions, bucket, validWriteIdList, isBucketedTable, conf); DeleteRecordKey deleteRecordKey = new DeleteRecordKey(); if (deleteReaderValue.next(deleteRecordKey)) { sortMerger.put(deleteRecordKey, deleteReaderValue); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcInputFormat.java index c581bba..892fcc0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcInputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcInputFormat.java @@ -31,6 +31,7 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizedInputFormatInterface; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport; import org.apache.hadoop.hive.ql.io.AcidUtils; import org.apache.hadoop.hive.ql.io.InputFormatChecker; import org.apache.hadoop.hive.ql.io.SelfDescribingInputFormatInterface; @@ -99,7 +100,7 @@ options.include(OrcInputFormat.genIncludedColumns(schema, conf)); OrcInputFormat.setSearchArgument(options, types, conf, true); - this.reader = file.rowsOptions(options); + this.reader = file.rowsOptions(options, conf); int partitionColumnCount = rbCtx.getPartitionColumnCount(); if (partitionColumnCount > 0) { @@ -204,4 +205,9 @@ public boolean validateInput(FileSystem fs, HiveConf conf, } return true; } + + @Override + public VectorizedSupport.Support[] getSupportedFeatures() { + return new VectorizedSupport.Support[] {VectorizedSupport.Support.DECIMAL_64}; + } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java index 71682af..91a01e9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java @@ -24,6 +24,8 @@ import java.util.Map; import java.util.Set; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -63,6 +65,7 @@ import com.google.common.annotations.VisibleForTesting; import org.apache.orc.PhysicalWriter; +import org.apache.orc.TypeDescription; /** * An ORC file writer. 
The file is divided into stripes, which is the natural @@ -93,7 +96,15 @@ OrcFile.WriterOptions opts) throws IOException { super(fs, path, opts); this.inspector = opts.getInspector(); - this.internalBatch = opts.getSchema().createRowBatch(opts.getBatchSize()); + boolean useDecimal64ColumnVectors = opts.getConfiguration() != null && + HiveConf.getVar(opts.getConfiguration(), HiveConf.ConfVars.HIVE_VECTORIZED_INPUT_FORMAT_SUPPORTS_ENABLED) + .equalsIgnoreCase("decimal_64"); + if (useDecimal64ColumnVectors) { + this.internalBatch = opts.getSchema().createRowBatch(TypeDescription.RowBatchVersion.USE_DECIMAL64, + opts.getBatchSize()); + } else { + this.internalBatch = opts.getSchema().createRowBatch(opts.getBatchSize()); + } this.fields = initializeFieldsFromOi(inspector); } @@ -207,9 +218,15 @@ static void setColumn(int rowId, ColumnVector column, break; } case DECIMAL: { - DecimalColumnVector vector = (DecimalColumnVector) column; - vector.set(rowId, ((HiveDecimalObjectInspector) inspector) + if (column instanceof Decimal64ColumnVector) { + Decimal64ColumnVector vector = (Decimal64ColumnVector) column; + vector.set(rowId, ((HiveDecimalObjectInspector) inspector) + .getPrimitiveWritableObject(obj)); + } else { + DecimalColumnVector vector = (DecimalColumnVector) column; + vector.set(rowId, ((HiveDecimalObjectInspector) inspector) .getPrimitiveWritableObject(obj)); + } break; } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedTreeReaderFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedTreeReaderFactory.java index c9078be..9302791 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedTreeReaderFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedTreeReaderFactory.java @@ -17,6 +17,8 @@ */ package org.apache.hadoop.hive.ql.io.orc.encoded; +import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector; +import org.apache.orc.OrcFile; import org.apache.orc.impl.RunLengthByteReader; import java.io.IOException; @@ -1200,6 +1202,147 @@ public static StreamReaderBuilder builder() { } } + protected static class Decimal64StreamReader extends Decimal64TreeReader implements SettableTreeReader { + private boolean _isFileCompressed; + private SettableUncompressedStream _presentStream; + private SettableUncompressedStream _valueStream; + private List vectors; + private int vectorIndex = 0; + + private Decimal64StreamReader(int columnId, int precision, int scale, + SettableUncompressedStream presentStream, + SettableUncompressedStream valueStream, + boolean isFileCompressed, + OrcProto.ColumnEncoding encoding, TreeReaderFactory.Context context, + List vectors) throws IOException { + super(columnId, presentStream, valueStream, encoding, + precision, scale, context); + this._isFileCompressed = isFileCompressed; + this._presentStream = presentStream; + this._valueStream = valueStream; + this.vectors = vectors; + } + + @Override + public void seek(PositionProvider index) throws IOException { + if (vectors != null) return; + if (present != null) { + if (_isFileCompressed) { + index.getNext(); + } + present.seek(index); + } + + // The data stream could be empty, or it may have reached end of stream before the present stream. + // This can happen if all values in the stream are null or the last row group's values are all null.
+ skipCompressedIndex(_isFileCompressed, index); + if (_valueStream.available() > 0) { + valueReader.seek(index); + } else { + skipSeek(index); + } + } + + @Override + public void nextVector( + ColumnVector previousVector, boolean[] isNull, int batchSize) throws IOException { + if (vectors == null) { + super.nextVector(previousVector, isNull, batchSize); + return; + } + vectors.get(vectorIndex++).shallowCopyTo(previousVector); + if (vectorIndex == vectors.size()) { + vectors = null; + } + } + + @Override + public void setBuffers(EncodedColumnBatch batch, boolean sameStripe) { + assert vectors == null; // See the comment in TimestampStreamReader.setBuffers. + ColumnStreamData[] streamsData = batch.getColumnData(columnId); + if (_presentStream != null) { + _presentStream.setBuffers(StreamUtils.createDiskRangeInfo(streamsData[OrcProto.Stream.Kind.PRESENT_VALUE])); + } + if (_valueStream != null) { + _valueStream.setBuffers(StreamUtils.createDiskRangeInfo(streamsData[OrcProto.Stream.Kind.DATA_VALUE])); + } + } + + public static class StreamReaderBuilder { + private int columnIndex; + private ColumnStreamData presentStream; + private ColumnStreamData valueStream; + private int scale; + private int precision; + private CompressionCodec compressionCodec; + private OrcProto.ColumnEncoding columnEncoding; + private List vectors; + private TreeReaderFactory.Context context; + + public StreamReaderBuilder setColumnIndex(int columnIndex) { + this.columnIndex = columnIndex; + return this; + } + + public StreamReaderBuilder setPrecision(int precision) { + this.precision = precision; + return this; + } + + public StreamReaderBuilder setScale(int scale) { + this.scale = scale; + return this; + } + + public StreamReaderBuilder setContext(TreeReaderFactory.Context context) { + this.context = context; + return this; + } + + public StreamReaderBuilder setPresentStream(ColumnStreamData presentStream) { + this.presentStream = presentStream; + return this; + } + + public StreamReaderBuilder setValueStream(ColumnStreamData valueStream) { + this.valueStream = valueStream; + return this; + } + + + public StreamReaderBuilder setCompressionCodec(CompressionCodec compressionCodec) { + this.compressionCodec = compressionCodec; + return this; + } + + public StreamReaderBuilder setColumnEncoding(OrcProto.ColumnEncoding encoding) { + this.columnEncoding = encoding; + return this; + } + + public Decimal64StreamReader build() throws IOException { + SettableUncompressedStream presentInStream = StreamUtils.createSettableUncompressedStream( + OrcProto.Stream.Kind.PRESENT.name(), presentStream); + + SettableUncompressedStream valueInStream = StreamUtils.createSettableUncompressedStream( + OrcProto.Stream.Kind.DATA.name(), valueStream); + + boolean isFileCompressed = compressionCodec != null; + return new Decimal64StreamReader(columnIndex, precision, scale, presentInStream, + valueInStream, isFileCompressed, columnEncoding, context, vectors); + } + + public StreamReaderBuilder setVectors(List vectors) { + this.vectors = vectors; + return this; + } + } + + public static StreamReaderBuilder builder() { + return new StreamReaderBuilder(); + } + } + protected static class DateStreamReader extends DateTreeReader implements SettableTreeReader { private boolean isFileCompressed; private SettableUncompressedStream _presentStream; @@ -2101,8 +2244,8 @@ public static StreamReaderBuilder builder() { } public static StructTreeReader createRootTreeReader(TypeDescription[] batchSchemas, - List encodings, OrcEncodedColumnBatch batch, - 
CompressionCodec codec, TreeReaderFactory.Context context) throws IOException { + List encodings, OrcEncodedColumnBatch batch, + CompressionCodec codec, Context context, final boolean useDecimal64ColumnVectors) throws IOException { // Note: we only look at the schema here to deal with complex types. Somebody has set up the // reader with whatever ideas they had to the schema and we just trust the reader to // produce the CVBs that was asked for. However, we only need to look at top level columns. @@ -2117,7 +2260,7 @@ public static StructTreeReader createRootTreeReader(TypeDescription[] batchSchem if (!batch.hasData(batchColIx) && !batch.hasVectors(batchColIx)) { throw new AssertionError("No data for column " + batchColIx + ": " + batchSchemas[i]); } - childReaders[i] = createEncodedTreeReader(batchSchemas[i], encodings, batch, codec, context); + childReaders[i] = createEncodedTreeReader(batchSchemas[i], encodings, batch, codec, context, useDecimal64ColumnVectors); } // TODO: do we actually need this reader? the caller just extracts child readers. @@ -2138,8 +2281,8 @@ private static void skipSeek(PositionProvider index) { private static TreeReader createEncodedTreeReader(TypeDescription schema, - List encodings, OrcEncodedColumnBatch batch, - CompressionCodec codec, TreeReaderFactory.Context context) throws IOException { + List encodings, OrcEncodedColumnBatch batch, + CompressionCodec codec, Context context, final boolean useDecimal64ColumnVectors) throws IOException { int columnIndex = schema.getId(); ColumnStreamData[] streamBuffers = null; List vectors = null; @@ -2200,12 +2343,12 @@ private static TreeReader createEncodedTreeReader(TypeDescription schema, case TIMESTAMP: case DATE: return getPrimitiveTreeReader(columnIndex, schema, codec, columnEncoding, - present, data, dictionary, lengths, secondary, context, vectors); + present, data, dictionary, lengths, secondary, context, vectors, useDecimal64ColumnVectors); case LIST: assert vectors == null; // Not currently supported. 
TypeDescription elementType = schema.getChildren().get(0); TreeReader elementReader = createEncodedTreeReader( - elementType, encodings, batch, codec, context); + elementType, encodings, batch, codec, context, useDecimal64ColumnVectors); return ListStreamReader.builder() .setColumnIndex(columnIndex) .setColumnEncoding(columnEncoding) @@ -2220,9 +2363,9 @@ private static TreeReader createEncodedTreeReader(TypeDescription schema, TypeDescription keyType = schema.getChildren().get(0); TypeDescription valueType = schema.getChildren().get(1); TreeReader keyReader = createEncodedTreeReader( - keyType, encodings, batch, codec, context); + keyType, encodings, batch, codec, context, useDecimal64ColumnVectors); TreeReader valueReader = createEncodedTreeReader( - valueType, encodings, batch, codec, context); + valueType, encodings, batch, codec, context, useDecimal64ColumnVectors); return MapStreamReader.builder() .setColumnIndex(columnIndex) .setColumnEncoding(columnEncoding) @@ -2240,7 +2383,7 @@ private static TreeReader createEncodedTreeReader(TypeDescription schema, for (int i = 0; i < childCount; i++) { TypeDescription childType = schema.getChildren().get(i); childReaders[i] = createEncodedTreeReader( - childType, encodings, batch, codec, context); + childType, encodings, batch, codec, context, useDecimal64ColumnVectors); } return StructStreamReader.builder() .setColumnIndex(columnIndex) @@ -2258,7 +2401,7 @@ private static TreeReader createEncodedTreeReader(TypeDescription schema, for (int i = 0; i < childCount; i++) { TypeDescription childType = schema.getChildren().get(i); childReaders[i] = createEncodedTreeReader( - childType, encodings, batch, codec, context); + childType, encodings, batch, codec, context, useDecimal64ColumnVectors); } return UnionStreamReader.builder() .setColumnIndex(columnIndex) @@ -2276,10 +2419,10 @@ private static TreeReader createEncodedTreeReader(TypeDescription schema, } private static TreeReader getPrimitiveTreeReader(final int columnIndex, - TypeDescription columnType, CompressionCodec codec, OrcProto.ColumnEncoding columnEncoding, - ColumnStreamData present, ColumnStreamData data, ColumnStreamData dictionary, - ColumnStreamData lengths, ColumnStreamData secondary, TreeReaderFactory.Context context, - List vectors) throws IOException { + TypeDescription columnType, CompressionCodec codec, OrcProto.ColumnEncoding columnEncoding, + ColumnStreamData present, ColumnStreamData data, ColumnStreamData dictionary, + ColumnStreamData lengths, ColumnStreamData secondary, Context context, + List vectors, final boolean useDecimal64ColumnVectors) throws IOException { switch (columnType.getCategory()) { case BINARY: return BinaryStreamReader.builder() @@ -2390,7 +2533,36 @@ private static TreeReader getPrimitiveTreeReader(final int columnIndex, .setVectors(vectors) .build(); case DECIMAL: - return DecimalStreamReader.builder() + // special handling for serde reader (text) in llap IO. + // if file format version is null, then we are processing text IF in LLAP IO, in which case + // we get vectors instead of streams. 
If the vectors contain an instance of Decimal64ColumnVector, we + // should use Decimal64StreamReader (which acts as a wrapper around the vectors). + boolean useDecimal64Reader = context.getFileFormat() == null && vectors != null && useDecimal64ColumnVectors; + if (useDecimal64Reader) { + boolean containDecimal64CV = false; + for (ColumnVector vector : vectors) { + if (vector instanceof Decimal64ColumnVector) { + containDecimal64CV = true; + break; + } + } + useDecimal64Reader &= containDecimal64CV; + } + if ((context.getFileFormat() == OrcFile.Version.UNSTABLE_PRE_2_0 || useDecimal64Reader) && + columnType.getPrecision() <= TypeDescription.MAX_DECIMAL64_PRECISION) { + return Decimal64StreamReader.builder() + .setColumnIndex(columnIndex) + .setPrecision(columnType.getPrecision()) + .setScale(columnType.getScale()) + .setPresentStream(present) + .setValueStream(data) + .setCompressionCodec(codec) + .setColumnEncoding(columnEncoding) + .setVectors(vectors) + .setContext(context) + .build(); + } else { + return DecimalStreamReader.builder() .setColumnIndex(columnIndex) .setPrecision(columnType.getPrecision()) .setScale(columnType.getScale()) @@ -2402,6 +2574,7 @@ private static TreeReader getPrimitiveTreeReader(final int columnIndex, .setVectors(vectors) .setContext(context) .build(); + } case TIMESTAMP: return TimestampStreamReader.builder() .setColumnIndex(columnIndex) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java index ed6d577..5e70a05 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java @@ -25,6 +25,7 @@ import org.apache.hadoop.hive.common.io.FileMetadataCache; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.vector.VectorizedInputFormatInterface; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport; import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils; import org.apache.hadoop.hive.ql.io.InputFormatChecker; import org.apache.hadoop.hive.ql.io.LlapCacheOnlyInputFormatInterface; @@ -115,4 +116,9 @@ public boolean validateInput(FileSystem fs, HiveConf conf, List file return true; } + + @Override + public VectorizedSupport.Support[] getSupportedFeatures() { + return null; + } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java index 394f826..a8dc803 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java @@ -20,6 +20,7 @@ import static org.apache.hadoop.hive.ql.plan.ReduceSinkDesc.ReducerTraits.UNIFORM; +import java.io.IOException; import java.io.Serializable; import java.lang.annotation.Annotation; import java.util.ArrayList; @@ -40,6 +41,7 @@ import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang3.tuple.ImmutablePair; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedInputFormatInterface; import org.apache.hadoop.hive.ql.exec.vector.reducesink.*; import org.apache.hadoop.hive.ql.io.AcidUtils; import org.apache.hadoop.hive.ql.parse.spark.SparkPartitionPruningSinkOperator; @@ -129,7 +131,6 @@ import org.apache.hadoop.hive.ql.plan.VectorPTFInfo; import org.apache.hadoop.hive.ql.plan.VectorPTFDesc.SupportedFunctionType; import
org.apache.hadoop.hive.ql.plan.VectorTableScanDesc; -import org.apache.hadoop.hive.ql.plan.VectorizationCondition; import org.apache.hadoop.hive.ql.plan.VectorGroupByDesc.ProcessingMode; import org.apache.hadoop.hive.ql.plan.VectorSparkHashTableSinkDesc; import org.apache.hadoop.hive.ql.plan.VectorSparkPartitionPruningSinkDesc; @@ -1207,6 +1208,14 @@ private void determineDataColumnNums(TableScanOperator tableScanOperator, private Support[] getVectorizedInputFormatSupports( Class inputFileFormatClass) { + try { + InputFormat inputFormat = FetchOperator.getInputFormatFromCache(inputFileFormatClass, hiveConf); + if (inputFormat instanceof VectorizedInputFormatInterface) { + return ((VectorizedInputFormatInterface) inputFormat).getSupportedFeatures(); + } + } catch (IOException e) { + LOG.error("Unable to instantiate {} input format class. Cannot determine vectorization support.", inputFileFormatClass.getName(), e); + } // FUTURE: Decide how to ask an input file format what vectorization features it supports. return null; } @@ -1830,14 +1839,6 @@ private void validateAndVectorizeMapWork(MapWork mapWork, VectorTaskColumnInfo v supportRemovedReasons.add(removeString); } - // And, if LLAP is enabled for now, disable DECIMAL_64; - if (isLlapIoEnabled && supportSet.contains(Support.DECIMAL_64)) { - supportSet.remove(Support.DECIMAL_64); - String removeString = - "DECIMAL_64 disabled because LLAP is enabled"; - supportRemovedReasons.add(removeString); - } - // Now remember what is supported for this query and any support that was // removed. vectorTaskColumnInfo.setSupportSetInUse(supportSet); @@ -2246,6 +2247,7 @@ private boolean getOnlyStructObjectInspectors(ReduceWork reduceWork, @Override public PhysicalContext resolve(PhysicalContext physicalContext) throws SemanticException { + this.physicalContext = physicalContext; hiveConf = physicalContext.getConf(); planMapper = physicalContext.getContext().getPlanMapper(); @@ -4265,6 +4267,13 @@ private boolean usesVectorUDFAdaptor(VectorExpression[] vecExprs) { vecAggrClasses, aggregateName, inputColVectorType, outputColVectorType, udafEvaluatorMode); if (vecAggrClass != null) { + // for now, disable operating on decimal64 column vectors for semijoin reduction, as + // we have to make sure the same decimal type is used during bloom filter creation + // and bloom filter probing + if (aggregateName.equals("bloom_filter")) { + inputExpression = vContext.wrapWithDecimal64ToDecimalConversion(inputExpression); + inputColVectorType = ColumnVector.Type.DECIMAL; + } final VectorAggregationDesc vecAggrDesc = new VectorAggregationDesc( aggrDesc, evaluator, inputTypeInfo, inputColVectorType, inputExpression, @@ -4359,8 +4368,6 @@ private boolean usesVectorUDFAdaptor(VectorExpression[] vecExprs) { return new ImmutablePair, String>(vectorOp, null); } - static int fake; - public static Operator vectorizeSelectOperator( Operator selectOp, VectorizationContext vContext, VectorSelectDesc vectorSelectDesc) @@ -4386,6 +4393,13 @@ private boolean usesVectorUDFAdaptor(VectorExpression[] vecExprs) { if (index < size) { vectorSelectExprs = Arrays.copyOf(vectorSelectExprs, index); } + + // Fix up the case where the parent expression's output data type physical variation is DECIMAL whereas + // at least one of its children is DECIMAL_64. Some expressions, x % y for example, only accept DECIMAL + // for x and y (at this time there is only DecimalColModuloDecimalColumn, so both x and y have to be DECIMAL).
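+ // For example, if x is produced as DECIMAL_64 (a scaled long in a Decimal64ColumnVector)
+ // while modulo is only implemented for DECIMAL, x % y effectively becomes
+ // DecimalColModuloDecimalColumn(wrapWithDecimal64ToDecimalConversion(x), y).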
+ // The following method introduces a cast if x or y is DECIMAL_64 and the parent expression (x % y) is DECIMAL. + fixDecimalDataTypePhysicalVariations(vContext, vectorSelectExprs); + vectorSelectDesc.setSelectExpressions(vectorSelectExprs); vectorSelectDesc.setProjectedOutputColumns(projectedOutputColumns); @@ -4394,6 +4408,71 @@ private boolean usesVectorUDFAdaptor(VectorExpression[] vecExprs) { vContext, vectorSelectDesc); } + private static void fixDecimalDataTypePhysicalVariations(final VectorizationContext vContext, + final VectorExpression[] vectorSelectExprs) throws HiveException { + for (int i = 0; i < vectorSelectExprs.length; i++) { + VectorExpression parent = vectorSelectExprs[i]; + VectorExpression newParent = fixDecimalDataTypePhysicalVariations(parent, parent.getChildExpressions(), + vContext); + if (parent != newParent) { + vectorSelectExprs[i] = newParent; + } + } + } + + private static VectorExpression fixDecimalDataTypePhysicalVariations(final VectorExpression parent, + final VectorExpression[] children, final VectorizationContext vContext) throws HiveException { + if (children == null || children.length == 0) { + return parent; + } + + for (int i = 0; i < children.length; i++) { + VectorExpression child = children[i]; + VectorExpression newChild = fixDecimalDataTypePhysicalVariations(child, child.getChildExpressions(), vContext); + if (child != newChild) { + children[i] = newChild; + } + } + if (parent.getOutputDataTypePhysicalVariation() == DataTypePhysicalVariation.NONE) { + boolean inputArgsChanged = false; + DataTypePhysicalVariation[] dataTypePhysicalVariations = parent.getInputDataTypePhysicalVariations(); + for (int i = 0; i < children.length; i++) { + VectorExpression vce = children[i]; + // we found at least one child with a mismatch + if (vce.getOutputDataTypePhysicalVariation() == DataTypePhysicalVariation.DECIMAL_64) { + VectorExpression castedVCE = vContext.wrapWithDecimal64ToDecimalConversion(vce); + children[i] = castedVCE; + inputArgsChanged = true; + dataTypePhysicalVariations[i] = DataTypePhysicalVariation.NONE; + } + } + // fix up the input column numbers and output column numbers + if (inputArgsChanged) { + int argumentCount = children.length + (parent.getOutputColumnNum() == -1 ?
0 : 1); + Object[] arguments = new Object[argumentCount]; + // new input column numbers + for (int i = 0; i < children.length; i++) { + VectorExpression vce = children[i]; + arguments[i] = vce.getOutputColumnNum(); + } + // retain output column number from parent + if (parent.getOutputColumnNum() != -1) { + arguments[arguments.length - 1] = parent.getOutputColumnNum(); + } + // re-instantiate the parent expression with new arguments + VectorExpression newParent = vContext.instantiateExpression(parent.getClass(), parent.getOutputTypeInfo(), + parent.getOutputDataTypePhysicalVariation(), arguments); + newParent.setOutputTypeInfo(parent.getOutputTypeInfo()); + newParent.setOutputDataTypePhysicalVariation(parent.getOutputDataTypePhysicalVariation()); + newParent.setInputTypeInfos(parent.getInputTypeInfos()); + newParent.setInputDataTypePhysicalVariations(dataTypePhysicalVariations); + newParent.setChildExpressions(parent.getChildExpressions()); + return newParent; + } + } + return parent; + } + private static void fillInPTFEvaluators( List windowsFunctions, String[] evaluatorFunctionNames, diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java index dc58ad1..fe475f6 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java @@ -229,7 +229,7 @@ static String getColumnNamesProperty() { return "booleanValue,byteValue,shortValue,intValue,longValue,floatValue,doubleValue,stringValue,decimalValue,dateValue,timestampValue"; } static String getColumnTypesProperty() { - return "boolean:tinyint:smallint:int:bigint:float:double:string:decimal:date:timestamp"; + return "boolean:tinyint:smallint:int:bigint:float:double:string:decimal(38,18):date:timestamp"; } } @@ -3847,9 +3847,10 @@ public void testRowNumberUniquenessInDifferentSplits() throws Exception { * Test schema evolution when using the reader directly. */ @Test - public void testSchemaEvolution() throws Exception { + public void testSchemaEvolutionOldDecimal() throws Exception { TypeDescription fileSchema = TypeDescription.fromString("struct,d:string>"); + conf.set(ConfVars.HIVE_VECTORIZED_INPUT_FORMAT_SUPPORTS_ENABLED.varname, "decimal_64"); Writer writer = OrcFile.createWriter(testFilePath, OrcFile.writerOptions(conf) .fileSystem(fs) @@ -3915,6 +3916,78 @@ public void testSchemaEvolution() throws Exception { } /** + * Test schema evolution when using the reader directly. 
+ */ + @Test + public void testSchemaEvolutionDecimal64() throws Exception { + TypeDescription fileSchema = + TypeDescription.fromString("struct,d:string>"); + conf.set(ConfVars.HIVE_VECTORIZED_INPUT_FORMAT_SUPPORTS_ENABLED.varname, "decimal_64"); + Writer writer = OrcFile.createWriter(testFilePath, + OrcFile.writerOptions(conf) + .fileSystem(fs) + .setSchema(fileSchema) + .compress(org.apache.orc.CompressionKind.NONE)); + VectorizedRowBatch batch = fileSchema.createRowBatch(TypeDescription.RowBatchVersion.USE_DECIMAL64,1000); + batch.size = 1000; + LongColumnVector lcv = ((LongColumnVector) ((StructColumnVector) batch.cols[1]).fields[0]); + for(int r=0; r < 1000; r++) { + ((LongColumnVector) batch.cols[0]).vector[r] = r * 42; + lcv.vector[r] = r * 10001; + ((BytesColumnVector) batch.cols[2]).setVal(r, + Integer.toHexString(r).getBytes(StandardCharsets.UTF_8)); + } + writer.addRowBatch(batch); + writer.close(); + TypeDescription readerSchema = TypeDescription.fromString( + "struct,d:string,future2:int>"); + Reader reader = OrcFile.createReader(testFilePath, + OrcFile.readerOptions(conf).filesystem(fs)); + RecordReader rows = reader.rowsOptions(new Reader.Options() + .schema(readerSchema)); + batch = readerSchema.createRowBatchV2(); + lcv = ((LongColumnVector) ((StructColumnVector) batch.cols[1]).fields[0]); + LongColumnVector future1 = ((LongColumnVector) ((StructColumnVector) batch.cols[1]).fields[1]); + assertEquals(true, rows.nextBatch(batch)); + assertEquals(1000, batch.size); + assertEquals(true, future1.isRepeating); + assertEquals(true, future1.isNull[0]); + assertEquals(true, batch.cols[3].isRepeating); + assertEquals(true, batch.cols[3].isNull[0]); + for(int r=0; r < batch.size; ++r) { + assertEquals("row " + r, r * 42, ((LongColumnVector) batch.cols[0]).vector[r]); + assertEquals("row " + r, r * 10001, lcv.vector[r]); + assertEquals("row " + r, Integer.toHexString(r), + ((BytesColumnVector) batch.cols[2]).toString(r)); + } + assertEquals(false, rows.nextBatch(batch)); + rows.close(); + + // try it again with an include vector + rows = reader.rowsOptions(new Reader.Options() + .schema(readerSchema) + .include(new boolean[]{false, true, true, true, false, false, true})); + batch = readerSchema.createRowBatchV2(); + lcv = ((LongColumnVector) ((StructColumnVector) batch.cols[1]).fields[0]); + future1 = ((LongColumnVector) ((StructColumnVector) batch.cols[1]).fields[1]); + assertEquals(true, rows.nextBatch(batch)); + assertEquals(1000, batch.size); + assertEquals(true, future1.isRepeating); + assertEquals(true, future1.isNull[0]); + assertEquals(true, batch.cols[3].isRepeating); + assertEquals(true, batch.cols[3].isNull[0]); + assertEquals(true, batch.cols[2].isRepeating); + assertEquals(true, batch.cols[2].isNull[0]); + for(int r=0; r < batch.size; ++r) { + assertEquals("row " + r, r * 42, ((LongColumnVector) batch.cols[0]).vector[r]); + assertEquals("row " + r, r * 10001, lcv.vector[r]); + } + assertEquals(false, rows.nextBatch(batch)); + rows.close(); + } + + /** + * Test column projection when using ACID.
*/ @Test @@ -3933,7 +4006,7 @@ public void testColumnProjectionWithAcid() throws Exception { .fileSystem(fs) .setSchema(fileSchema) .compress(org.apache.orc.CompressionKind.NONE)); - VectorizedRowBatch batch = fileSchema.createRowBatch(1000); + VectorizedRowBatch batch = fileSchema.createRowBatch(TypeDescription.RowBatchVersion.USE_DECIMAL64,1000); batch.size = 1000; StructColumnVector scv = (StructColumnVector)batch.cols[5]; // operation @@ -4047,7 +4120,7 @@ public void testAcidReadPastLastStripeOffset() throws Exception { .stripeSize(128); // Create ORC file with small stripe size so we can write multiple stripes. Writer writer = OrcFile.createWriter(testFilePath, options); - VectorizedRowBatch batch = fileSchema.createRowBatch(1000); + VectorizedRowBatch batch = fileSchema.createRowBatch(TypeDescription.RowBatchVersion.USE_DECIMAL64,1000); batch.size = 1000; StructColumnVector scv = (StructColumnVector)batch.cols[5]; // operation diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java index d8a7af8..cc29384 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java @@ -168,7 +168,7 @@ private Reader createMockReader() throws IOException { setRow(row4, OrcRecordUpdater.INSERT_OPERATION, 40, 50, 60, 130, "fourth"); OrcStruct row5 = new OrcStruct(OrcRecordUpdater.FIELDS); setRow(row5, OrcRecordUpdater.INSERT_OPERATION, 40, 50, 61, 140, "fifth"); - Mockito.when(reader.rowsOptions(Mockito.any(Reader.Options.class))) + Mockito.when(reader.rowsOptions(Mockito.any(Reader.Options.class), Mockito.any(HiveConf.class))) .thenReturn(recordReader); Mockito.when(recordReader.hasNext()). @@ -192,7 +192,7 @@ public void testReaderPair() throws Exception { RecordIdentifier minKey = new RecordIdentifier(10, 20, 30); RecordIdentifier maxKey = new RecordIdentifier(40, 50, 60); ReaderPair pair = new OrcRawRecordMerger.ReaderPairAcid(key, reader, minKey, maxKey, - new Reader.Options()); + new Reader.Options(), new HiveConf()); RecordReader recordReader = pair.getRecordReader(); assertEquals(10, key.getWriteId()); assertEquals(20, key.getBucketProperty()); @@ -218,7 +218,7 @@ public void testReaderPairNoMin() throws Exception { Reader reader = createMockReader(); ReaderPair pair = new OrcRawRecordMerger.ReaderPairAcid(key, reader, null, null, - new Reader.Options()); + new Reader.Options(), new HiveConf()); RecordReader recordReader = pair.getRecordReader(); assertEquals(10, key.getWriteId()); assertEquals(20, key.getBucketProperty()); @@ -274,7 +274,7 @@ private Reader createMockOriginalReader() throws IOException { OrcStruct row4 = createOriginalRow("fourth"); OrcStruct row5 = createOriginalRow("fifth"); - Mockito.when(reader.rowsOptions(Mockito.any(Reader.Options.class))) + Mockito.when(reader.rowsOptions(Mockito.any(Reader.Options.class), Mockito.any(HiveConf.class))) .thenReturn(recordReader); Mockito.when(recordReader.hasNext()). 
thenReturn(true, true, true, true, true, false); @@ -410,7 +410,7 @@ public void testNewBase() throws Exception { types.add(typeBuilder.build()); Mockito.when(reader.getTypes()).thenReturn(types); - Mockito.when(reader.rowsOptions(Mockito.any(Reader.Options.class))) + Mockito.when(reader.rowsOptions(Mockito.any(Reader.Options.class), Mockito.any(HiveConf.class))) .thenReturn(recordReader); OrcStruct row1 = new OrcStruct(OrcRecordUpdater.FIELDS); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java index 0c9c95d..c23f00e 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java @@ -49,6 +49,7 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; +import org.apache.orc.TypeDescription; import org.junit.Before; import org.junit.Test; @@ -151,7 +152,7 @@ private void checkVectorizedReader() throws Exception { OrcFile.readerOptions(conf)); RecordReaderImpl vrr = (RecordReaderImpl) vreader.rows(); RecordReaderImpl rr = (RecordReaderImpl) reader.rows(); - VectorizedRowBatch batch = reader.getSchema().createRowBatch(); + VectorizedRowBatch batch = reader.getSchema().createRowBatchV2(); OrcStruct row = null; // Check Vectorized ORC reader against ORC row reader diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedOrcAcidRowBatchReader.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedOrcAcidRowBatchReader.java index e478371..551e5ca 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedOrcAcidRowBatchReader.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedOrcAcidRowBatchReader.java @@ -226,7 +226,7 @@ private void testVectorizedOrcAcidRowBatchReader(String deleteEventRegistry) thr assertTrue(vectorizedReader.getDeleteEventRegistry() instanceof SortMergedDeleteEventRegistry); } TypeDescription schema = OrcInputFormat.getDesiredRowTypeDescr(conf, true, Integer.MAX_VALUE); - VectorizedRowBatch vectorizedRowBatch = schema.createRowBatch(); + VectorizedRowBatch vectorizedRowBatch = schema.createRowBatchV2(); vectorizedRowBatch.setPartitionInfo(1, 0); // set data column count as 1. 
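// createRowBatchV2() is shorthand for createRowBatch(TypeDescription.RowBatchVersion.USE_DECIMAL64, size)
// with the default batch size: decimal columns with precision <= 18 come back as
// Decimal64ColumnVector (one scaled long per value) rather than DecimalColumnVector.
// A minimal sketch of the difference (hypothetical schema, for illustration only):
//   TypeDescription t = TypeDescription.fromString("struct<d:decimal(10,2)>");
//   t.createRowBatch().cols[0];   // DecimalColumnVector
//   t.createRowBatchV2().cols[0]; // Decimal64ColumnVector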
long previousPayload = Long.MIN_VALUE; while (vectorizedReader.next(null, vectorizedRowBatch)) { diff --git a/ql/src/test/queries/clientpositive/explainanalyze_3.q b/ql/src/test/queries/clientpositive/explainanalyze_3.q index 3d5b3a8..1f31218 100644 --- a/ql/src/test/queries/clientpositive/explainanalyze_3.q +++ b/ql/src/test/queries/clientpositive/explainanalyze_3.q @@ -110,7 +110,7 @@ select * from cte; explain analyze with cte as (select * from src order by key limit 5) select * from cte; -create table orc_merge5_n1 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc; +create table orc_merge5_n1 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc; load data local inpath '../../data/files/orc_split_elim.orc' into table orc_merge5_n1; diff --git a/ql/src/test/queries/clientpositive/llap_acid2.q b/ql/src/test/queries/clientpositive/llap_acid2.q index a409c26..cd06d31 100644 --- a/ql/src/test/queries/clientpositive/llap_acid2.q +++ b/ql/src/test/queries/clientpositive/llap_acid2.q @@ -29,18 +29,27 @@ CREATE TABLE orc_llap_n2 ( cfloat1 FLOAT, cdouble1 DOUBLE, cstring1 string, - cfloat2 float -) stored as orc TBLPROPERTIES ('transactional'='true'); + cfloat2 float, + cdecimal1 decimal(10,3), + cdecimal2 decimal(38,10) +) stored as orc TBLPROPERTIES ('transactional'='true','orc.write.format'='UNSTABLE-PRE-2.0'); insert into table orc_llap_n2 select cint, cbigint, cfloat, cdouble, cint as c1, cbigint as c2, cfloat as c3, cdouble as c4, cint as c8, cbigint as c7, cfloat as c6, cdouble as c5, - cstring1, cfloat as c9 from alltypesorc order by cdouble asc limit 30; - + cstring1, cfloat as c9, cast("1.123" as decimal(10,3))as c10, + cast("1.123456789" as decimal(38,18)) as c11 from alltypesorc order by cdouble asc limit 30; +alter table orc_llap_n2 set TBLPROPERTIES ('transactional'='true','orc.write.format'='0.12'); +insert into table orc_llap_n2 +select cint, cbigint, cfloat, cdouble, + cint as c1, cbigint as c2, cfloat as c3, cdouble as c4, + cint as c8, cbigint as c7, cfloat as c6, cdouble as c5, + cstring1, cfloat as c9, cast("3.321" as decimal(10,3))as c10, + cast("9.987654321" as decimal(38,18)) as c11 from alltypesorc order by cdouble asc limit 30; CREATE TABLE orc_llap2 ( @@ -57,18 +66,22 @@ CREATE TABLE orc_llap2 ( cfloat1 FLOAT, cdouble1 DOUBLE, cstring1 string, - cfloat2 float -) stored as orc TBLPROPERTIES ('transactional'='false'); + cfloat2 float, + cdecimal1 decimal(10,3), + cdecimal2 decimal(38,10) +) stored as orc TBLPROPERTIES ('transactional'='false', 'orc.write.format'='UNSTABLE-PRE-2.0'); insert into table orc_llap2 select cint, cbigint, cfloat, cdouble, cint as c1, cbigint as c2, cfloat as c3, cdouble as c4, cint as c8, cbigint as c7, cfloat as c6, cdouble as c5, - cstring1, cfloat as c9 from alltypesorc order by cdouble asc limit 30; + cstring1, cfloat as c9, cast("1.123" as decimal(10,3))as c10, + cast("1.123456789" as decimal(38,18)) as c11 from alltypesorc order by cdouble asc limit 30; -alter table orc_llap2 set TBLPROPERTIES ('transactional'='true'); +alter table orc_llap2 set TBLPROPERTIES ('transactional'='true','orc.write.format'='0.12'); -update orc_llap2 set cstring1 = 'testvalue' where cstring1 = 'N016jPED08o'; +update orc_llap2 set cstring1 = 'testvalue', cdecimal1 = cast("3.321" as decimal(10,3)), +cdecimal2 = cast("9.987654321" as decimal(38,18)) where cstring1 = 'N016jPED08o'; SET hive.llap.io.enabled=true; diff --git 
a/ql/src/test/queries/clientpositive/llap_decimal64_reader.q b/ql/src/test/queries/clientpositive/llap_decimal64_reader.q new file mode 100644 index 0000000..a81feba --- /dev/null +++ b/ql/src/test/queries/clientpositive/llap_decimal64_reader.q @@ -0,0 +1,54 @@ +--! qt:dataset:alltypesorc +SET hive.vectorized.execution.enabled=true; + +SET hive.llap.io.enabled=false; + +SET hive.exec.orc.default.row.index.stride=1000; +SET hive.optimize.index.filter=true; +set hive.auto.convert.join=false; + +DROP TABLE orc_llap_n0; + +-- this test mixes and matches orc versions and flips the config to use decimal64 column vectors +set hive.auto.convert.join=true; +SET hive.llap.io.enabled=true; +CREATE TABLE orc_llap_n0( + ctinyint TINYINT, + csmallint SMALLINT, + cint INT, + cbigint BIGINT, + cfloat FLOAT, + cdouble DOUBLE, + cstring1 STRING, + cstring2 STRING, + ctimestamp1 TIMESTAMP, + ctimestamp2 TIMESTAMP, + cboolean1 BOOLEAN, + cboolean2 BOOLEAN, + cdecimal1 decimal(10,2), + cdecimal2 decimal(38,5)) + STORED AS ORC tblproperties ("orc.compress"="NONE"); + +insert into table orc_llap_n0 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2, + cast("3.345" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc; + +alter table orc_llap_n0 set tblproperties ("orc.compress"="NONE", 'orc.write.format'='UNSTABLE-PRE-2.0'); + +insert into table orc_llap_n0 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2, + cast("4.456" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc; + +set hive.vectorized.input.format.supports.enabled=decimal_64; +explain vectorization select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2; +select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2; + +set hive.vectorized.input.format.supports.enabled=none; +explain vectorization select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2; +select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2; + +DROP TABLE orc_llap_n0; diff --git a/ql/src/test/queries/clientpositive/llap_uncompressed.q b/ql/src/test/queries/clientpositive/llap_uncompressed.q index 875356c..de3cdc6 100644 --- a/ql/src/test/queries/clientpositive/llap_uncompressed.q +++ b/ql/src/test/queries/clientpositive/llap_uncompressed.q @@ -24,13 +24,20 @@ CREATE TABLE orc_llap_n0( ctimestamp1 TIMESTAMP, ctimestamp2 TIMESTAMP, cboolean1 BOOLEAN, - cboolean2 BOOLEAN) + cboolean2 BOOLEAN, + cdecimal1 decimal(10,2), + cdecimal2 decimal(38,5)) STORED AS ORC tblproperties ("orc.compress"="NONE"); insert into table orc_llap_n0 -select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2 -from alltypesorc; +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2, + cast("3.345" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc; +alter table orc_llap_n0 set tblproperties
("orc.compress"="NONE", 'orc.write.format'='UNSTABLE-PRE-2.0'); + +insert into table orc_llap_n0 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2, + cast("3.345" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc; SET hive.llap.io.enabled=true; diff --git a/ql/src/test/queries/clientpositive/orc_create.q b/ql/src/test/queries/clientpositive/orc_create.q index 6d41009..dfae138 100644 --- a/ql/src/test/queries/clientpositive/orc_create.q +++ b/ql/src/test/queries/clientpositive/orc_create.q @@ -78,7 +78,7 @@ CREATE TABLE orc_create_people_staging ( first_name string, last_name string, address string, - salary decimal, + salary decimal(38,0), start_date timestamp, state string); @@ -90,7 +90,7 @@ CREATE TABLE orc_create_people ( first_name string, last_name string, address string, - salary decimal, + salary decimal(38,0), start_date timestamp) PARTITIONED BY (state string) STORED AS orc; diff --git a/ql/src/test/queries/clientpositive/orc_merge11.q b/ql/src/test/queries/clientpositive/orc_merge11.q index d5add84..208c5b7 100644 --- a/ql/src/test/queries/clientpositive/orc_merge11.q +++ b/ql/src/test/queries/clientpositive/orc_merge11.q @@ -3,15 +3,15 @@ set hive.vectorized.execution.enabled=false; DROP TABLE orcfile_merge1_n2; DROP TABLE orc_split_elim_n0; -create table orc_split_elim_n0 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc; +create table orc_split_elim_n0 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc; load data local inpath '../../data/files/orc_split_elim.orc' into table orc_split_elim_n0; load data local inpath '../../data/files/orc_split_elim.orc' into table orc_split_elim_n0; -create table orcfile_merge1_n2 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc tblproperties("orc.compress.size"="4096"); +create table orcfile_merge1_n2 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc tblproperties("orc.compress.size"="4096"); -insert overwrite table orcfile_merge1_n2 select * from orc_split_elim_n0; -insert into table orcfile_merge1_n2 select * from orc_split_elim_n0; +insert overwrite table orcfile_merge1_n2 select * from orc_split_elim_n0 order by userid; +insert into table orcfile_merge1_n2 select * from orc_split_elim_n0 order by userid; dfs -ls ${hiveconf:hive.metastore.warehouse.dir}/orcfile_merge1_n2/; diff --git a/ql/src/test/queries/clientpositive/orc_merge5.q b/ql/src/test/queries/clientpositive/orc_merge5.q index 190c6e0..4ae5ba6 100644 --- a/ql/src/test/queries/clientpositive/orc_merge5.q +++ b/ql/src/test/queries/clientpositive/orc_merge5.q @@ -3,8 +3,8 @@ set hive.explain.user=false; -- SORT_QUERY_RESULTS -create table orc_merge5_n5 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc; -create table orc_merge5b_n0 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc; +create table orc_merge5_n5 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc; +create table orc_merge5b_n0 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc; load data local inpath '../../data/files/orc_split_elim.orc' into table orc_merge5_n5; diff --git a/ql/src/test/queries/clientpositive/orc_merge6.q 
b/ql/src/test/queries/clientpositive/orc_merge6.q index fabe656..1c7ab08 100644 --- a/ql/src/test/queries/clientpositive/orc_merge6.q +++ b/ql/src/test/queries/clientpositive/orc_merge6.q @@ -4,8 +4,8 @@ set hive.explain.user=false; -- SORT_QUERY_RESULTS -- orc file merge tests for static partitions -create table orc_merge5_n4 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc; -create table orc_merge5a_n1 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) partitioned by (year string, hour int) stored as orc; +create table orc_merge5_n4 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc; +create table orc_merge5a_n1 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) partitioned by (year string, hour int) stored as orc; load data local inpath '../../data/files/orc_split_elim.orc' into table orc_merge5_n4; diff --git a/ql/src/test/queries/clientpositive/orc_merge7.q b/ql/src/test/queries/clientpositive/orc_merge7.q index 2558797..6504989 100644 --- a/ql/src/test/queries/clientpositive/orc_merge7.q +++ b/ql/src/test/queries/clientpositive/orc_merge7.q @@ -5,8 +5,8 @@ set hive.explain.user=false; -- orc merge file tests for dynamic partition case -create table orc_merge5_n2 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc; -create table orc_merge5a_n0 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) partitioned by (st double) stored as orc; +create table orc_merge5_n2 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc; +create table orc_merge5a_n0 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) partitioned by (st double) stored as orc; load data local inpath '../../data/files/orc_split_elim.orc' into table orc_merge5_n2; diff --git a/ql/src/test/queries/clientpositive/orc_merge_incompat1.q b/ql/src/test/queries/clientpositive/orc_merge_incompat1.q index aba4617..2b768ea 100644 --- a/ql/src/test/queries/clientpositive/orc_merge_incompat1.q +++ b/ql/src/test/queries/clientpositive/orc_merge_incompat1.q @@ -3,8 +3,8 @@ set hive.explain.user=false; -- SORT_QUERY_RESULTS -create table orc_merge5_n3 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc; -create table orc_merge5b (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc; +create table orc_merge5_n3 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc; +create table orc_merge5b (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc; load data local inpath '../../data/files/orc_split_elim.orc' into table orc_merge5_n3; diff --git a/ql/src/test/queries/clientpositive/orc_merge_incompat2.q b/ql/src/test/queries/clientpositive/orc_merge_incompat2.q index ef66522..6281c96 100644 --- a/ql/src/test/queries/clientpositive/orc_merge_incompat2.q +++ b/ql/src/test/queries/clientpositive/orc_merge_incompat2.q @@ -6,8 +6,8 @@ set hive.explain.user=false; -- orc merge file tests for dynamic partition case -create table orc_merge5 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc; -create table orc_merge5a (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) partitioned by (st double) stored as orc; 
+create table orc_merge5 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc; +create table orc_merge5a (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) partitioned by (st double) stored as orc; load data local inpath '../../data/files/orc_split_elim.orc' into table orc_merge5; diff --git a/ql/src/test/queries/clientpositive/orc_schema_evolution_float.q b/ql/src/test/queries/clientpositive/orc_schema_evolution_float.q index c2d9840..ca5dc6f 100644 --- a/ql/src/test/queries/clientpositive/orc_schema_evolution_float.q +++ b/ql/src/test/queries/clientpositive/orc_schema_evolution_float.q @@ -1,6 +1,8 @@ set hive.vectorized.execution.enabled=false; set hive.optimize.index.filter=false; set hive.metastore.disallow.incompatible.col.type.changes=false; +-- set this to 'decimal_64' after resolving HIVE-19792 +set hive.vectorized.input.format.supports.enabled=none; drop table float_text; create table float_text(f float); diff --git a/ql/src/test/queries/clientpositive/orc_split_elimination.q b/ql/src/test/queries/clientpositive/orc_split_elimination.q index 719b21c..03e0e73 100644 --- a/ql/src/test/queries/clientpositive/orc_split_elimination.q +++ b/ql/src/test/queries/clientpositive/orc_split_elimination.q @@ -2,7 +2,7 @@ set hive.vectorized.execution.enabled=false; -- SORT_QUERY_RESULTS -create table orc_split_elim (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc; +create table orc_split_elim (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc; load data local inpath '../../data/files/orc_split_elim.orc' into table orc_split_elim; @@ -105,7 +105,7 @@ select userid,string1,subtype,decimal1,ts from orc_split_elim where userid<=70; SET hive.optimize.index.filter=false; -- partitioned table -create table orc_split_elim_part (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) partitioned by (country string, year int) stored as orc; +create table orc_split_elim_part (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) partitioned by (country string, year int) stored as orc; alter table orc_split_elim_part add partition(country='us', year=2000); alter table orc_split_elim_part add partition(country='us', year=2001); diff --git a/ql/src/test/queries/clientpositive/schema_evol_orc_nonvec_part_all_primitive.q b/ql/src/test/queries/clientpositive/schema_evol_orc_nonvec_part_all_primitive.q index 427734f..53c16e0 100644 --- a/ql/src/test/queries/clientpositive/schema_evol_orc_nonvec_part_all_primitive.q +++ b/ql/src/test/queries/clientpositive/schema_evol_orc_nonvec_part_all_primitive.q @@ -12,6 +12,8 @@ set hive.exec.dynamic.partition.mode=nonstrict; set hive.metastore.disallow.incompatible.col.type.changes=false; set hive.default.fileformat=orc; set hive.llap.io.enabled=false; +-- set this to 'decimal_64' after resolving HIVE-19792 +set hive.vectorized.input.format.supports.enabled=none; -- SORT_QUERY_RESULTS -- diff --git a/ql/src/test/queries/clientpositive/schema_evol_orc_nonvec_part_all_primitive_llap_io.q b/ql/src/test/queries/clientpositive/schema_evol_orc_nonvec_part_all_primitive_llap_io.q index 1eca9e3..f2fb2f0 100644 --- a/ql/src/test/queries/clientpositive/schema_evol_orc_nonvec_part_all_primitive_llap_io.q +++ b/ql/src/test/queries/clientpositive/schema_evol_orc_nonvec_part_all_primitive_llap_io.q @@ -13,6 +13,8 @@ set 
hive.metastore.disallow.incompatible.col.type.changes=false; set hive.default.fileformat=orc; set hive.llap.io.enabled=true; set hive.llap.io.encode.enabled=true; +-- set this to 'decimal_64' after resolving HIVE-19792 +set hive.vectorized.input.format.supports.enabled=none; -- SORT_QUERY_RESULTS -- diff --git a/ql/src/test/queries/clientpositive/schema_evol_orc_vec_part_all_primitive.q b/ql/src/test/queries/clientpositive/schema_evol_orc_vec_part_all_primitive.q index 6e35f5a..e811f1d 100644 --- a/ql/src/test/queries/clientpositive/schema_evol_orc_vec_part_all_primitive.q +++ b/ql/src/test/queries/clientpositive/schema_evol_orc_vec_part_all_primitive.q @@ -12,6 +12,8 @@ set hive.exec.dynamic.partition.mode=nonstrict; set hive.metastore.disallow.incompatible.col.type.changes=false; set hive.default.fileformat=orc; set hive.llap.io.enabled=false; +-- set this to 'decimal_64' after resolving HIVE-19792 +set hive.vectorized.input.format.supports.enabled=none; -- SORT_QUERY_RESULTS -- diff --git a/ql/src/test/queries/clientpositive/schema_evol_orc_vec_part_all_primitive_llap_io.q b/ql/src/test/queries/clientpositive/schema_evol_orc_vec_part_all_primitive_llap_io.q index 576f994..bae6cc8 100644 --- a/ql/src/test/queries/clientpositive/schema_evol_orc_vec_part_all_primitive_llap_io.q +++ b/ql/src/test/queries/clientpositive/schema_evol_orc_vec_part_all_primitive_llap_io.q @@ -13,6 +13,8 @@ set hive.metastore.disallow.incompatible.col.type.changes=false; set hive.default.fileformat=orc; set hive.llap.io.enabled=true; set hive.llap.io.encode.enabled=true; +-- set this to 'decimal_64' after resolving HIVE-19792 +set hive.vectorized.input.format.supports.enabled=none; -- SORT_QUERY_RESULTS -- diff --git a/ql/src/test/queries/clientpositive/type_change_test_int.q b/ql/src/test/queries/clientpositive/type_change_test_int.q index 112a674..2a49871 100644 --- a/ql/src/test/queries/clientpositive/type_change_test_int.q +++ b/ql/src/test/queries/clientpositive/type_change_test_int.q @@ -1,3 +1,6 @@ +-- set this to 'decimal_64' after resolving HIVE-19792 +set hive.vectorized.input.format.supports.enabled=none; + -- Create a base table to be used for loading data: Begin drop table if exists testAltCol_n1; create table testAltCol_n1 diff --git a/ql/src/test/queries/clientpositive/type_change_test_int_vectorized.q b/ql/src/test/queries/clientpositive/type_change_test_int_vectorized.q index 9e93a2f..6a940ac 100644 --- a/ql/src/test/queries/clientpositive/type_change_test_int_vectorized.q +++ b/ql/src/test/queries/clientpositive/type_change_test_int_vectorized.q @@ -1,3 +1,5 @@ +-- set this to 'decimal_64' after resolving HIVE-19792 +set hive.vectorized.input.format.supports.enabled=none; -- Create a base table to be used for loading data: Begin drop table if exists testAltCol; create table testAltCol diff --git a/ql/src/test/queries/clientpositive/vector_case_when_1.q b/ql/src/test/queries/clientpositive/vector_case_when_1.q index 0ba17da..8614087 100644 --- a/ql/src/test/queries/clientpositive/vector_case_when_1.q +++ b/ql/src/test/queries/clientpositive/vector_case_when_1.q @@ -5,6 +5,8 @@ set hive.explain.user=false; set hive.fetch.task.conversion=none; set hive.vectorized.execution.enabled=true; +-- SORT_QUERY_RESULTS + CREATE TABLE lineitem_test_txt (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, @@ -69,8 +71,7 @@ SELECT IF(L_SUPPKEY > 10000, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE), NULL) AS Field_10, IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11, IF(L_SUPPKEY % 500 > 100, 
DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12 -FROM lineitem_test -ORDER BY Quantity; +FROM lineitem_test; SELECT L_QUANTITY as Quantity, CASE @@ -109,8 +110,7 @@ SELECT IF(L_SUPPKEY > 10000, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE), NULL) AS Field_10, IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11, IF(L_SUPPKEY % 500 > 100, DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12 -FROM lineitem_test -ORDER BY Quantity; +FROM lineitem_test; SET hive.vectorized.if.expr.mode=good; @@ -153,8 +153,7 @@ SELECT IF(L_SUPPKEY > 10000, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE), NULL) AS Field_10, IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11, IF(L_SUPPKEY % 500 > 100, DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12 -FROM lineitem_test -ORDER BY Quantity; +FROM lineitem_test; SELECT L_QUANTITY as Quantity, CASE @@ -193,8 +192,7 @@ SELECT IF(L_SUPPKEY > 10000, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE), NULL) AS Field_10, IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11, IF(L_SUPPKEY % 500 > 100, DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12 -FROM lineitem_test -ORDER BY Quantity; +FROM lineitem_test; SET hive.vectorized.if.expr.mode=better; @@ -237,8 +235,7 @@ SELECT IF(L_SUPPKEY > 10000, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE), NULL) AS Field_10, IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11, IF(L_SUPPKEY % 500 > 100, DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12 -FROM lineitem_test -ORDER BY Quantity; +FROM lineitem_test; SELECT L_QUANTITY as Quantity, CASE @@ -277,6 +274,5 @@ SELECT IF(L_SUPPKEY > 10000, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE), NULL) AS Field_10, IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11, IF(L_SUPPKEY % 500 > 100, DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12 -FROM lineitem_test -ORDER BY Quantity; - \ No newline at end of file +FROM lineitem_test; + diff --git a/ql/src/test/queries/clientpositive/vector_decimal_5.q b/ql/src/test/queries/clientpositive/vector_decimal_5.q index f5de13b..e0956e4 100644 --- a/ql/src/test/queries/clientpositive/vector_decimal_5.q +++ b/ql/src/test/queries/clientpositive/vector_decimal_5.q @@ -21,9 +21,10 @@ SELECT key FROM DECIMAL_5 ORDER BY key; SELECT DISTINCT key FROM DECIMAL_5 ORDER BY key; +explain SELECT cast(key as decimal) FROM DECIMAL_5; SELECT cast(key as decimal) FROM DECIMAL_5; SELECT cast(key as decimal(6,3)) FROM DECIMAL_5; DROP TABLE DECIMAL_5_txt; -DROP TABLE DECIMAL_5; \ No newline at end of file +DROP TABLE DECIMAL_5; diff --git a/ql/src/test/queries/clientpositive/vector_decimal_mapjoin.q b/ql/src/test/queries/clientpositive/vector_decimal_mapjoin.q index 6e5b5b6..ef769fb 100644 --- a/ql/src/test/queries/clientpositive/vector_decimal_mapjoin.q +++ b/ql/src/test/queries/clientpositive/vector_decimal_mapjoin.q @@ -35,11 +35,13 @@ select t1_n48.`dec`, t2_n29.`dec` from t1_n48 join t2_n29 on (t1_n48.`dec`=t2_n2 -- SORT_QUERY_RESULTS select t1_n48.`dec`, t2_n29.`dec` from t1_n48 join t2_n29 on (t1_n48.`dec`=t2_n29.`dec`); +select count(*) from (select t1_n48.`dec`, t2_n29.`dec` from t1_n48 join t2_n29 on (t1_n48.`dec`=t2_n29.`dec`)) as t; explain vectorization detail select t1_n48.`dec`, t1_n48.value_dec, t2_n29.`dec`, t2_n29.value_dec from t1_n48 join t2_n29 on (t1_n48.`dec`=t2_n29.`dec`); select t1_n48.`dec`, t1_n48.value_dec, t2_n29.`dec`, t2_n29.value_dec from t1_n48 
join t2_n29 on (t1_n48.`dec`=t2_n29.`dec`); +select count(*) from (select t1_n48.`dec`, t1_n48.value_dec, t2_n29.`dec`, t2_n29.value_dec from t1_n48 join t2_n29 on (t1_n48.`dec`=t2_n29.`dec`)) as t; @@ -72,11 +74,13 @@ select t1_small.`dec`, t2_small.`dec` from t1_small join t2_small on (t1_small.` -- SORT_QUERY_RESULTS select t1_small.`dec`, t2_small.`dec` from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`); +select count(*) from (select t1_small.`dec`, t2_small.`dec` from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`)) as t; explain vectorization detail select t1_small.`dec`, t1_small.value_dec, t2_small.`dec`, t2_small.value_dec from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`); select t1_small.`dec`, t1_small.value_dec, t2_small.`dec`, t2_small.value_dec from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`); +select count(*) from (select t1_small.`dec`, t1_small.value_dec, t2_small.`dec`, t2_small.value_dec from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`)) as t; set hive.vectorized.input.format.supports.enabled=none; @@ -87,9 +91,11 @@ select t1_small.`dec`, t2_small.`dec` from t1_small join t2_small on (t1_small.` -- SORT_QUERY_RESULTS select t1_small.`dec`, t2_small.`dec` from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`); +select count(*) from (select t1_small.`dec`, t2_small.`dec` from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`)) as t; explain vectorization detail select t1_small.`dec`, t1_small.value_dec, t2_small.`dec`, t2_small.value_dec from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`); select t1_small.`dec`, t1_small.value_dec, t2_small.`dec`, t2_small.value_dec from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`); +select count(*) from (select t1_small.`dec`, t1_small.value_dec, t2_small.`dec`, t2_small.value_dec from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`)) as t; diff --git a/ql/src/test/queries/clientpositive/vectorized_dynamic_semijoin_reduction2.q b/ql/src/test/queries/clientpositive/vectorized_dynamic_semijoin_reduction2.q index 7998035..743e8db 100644 --- a/ql/src/test/queries/clientpositive/vectorized_dynamic_semijoin_reduction2.q +++ b/ql/src/test/queries/clientpositive/vectorized_dynamic_semijoin_reduction2.q @@ -39,7 +39,7 @@ EXPLAIN select count(*) from dsrv2_big a join dsrv2_small b on (a.partkey_bigint select count(*) from dsrv2_big a join dsrv2_small b on (a.partkey_bigint = b.partkey_bigint); -- single key (decimal) -EXPLAIN select count(*) from dsrv2_big a join dsrv2_small b on (a.partkey_decimal = b.partkey_decimal); +EXPLAIN VECTORIZATION DETAIL select count(*) from dsrv2_big a join dsrv2_small b on (a.partkey_decimal = b.partkey_decimal); select count(*) from dsrv2_big a join dsrv2_small b on (a.partkey_decimal = b.partkey_decimal); -- single key (double) diff --git a/ql/src/test/results/clientpositive/llap/acid_no_buckets.q.out b/ql/src/test/results/clientpositive/llap/acid_no_buckets.q.out index eb4a8cb..80bbba4 100644 --- a/ql/src/test/results/clientpositive/llap/acid_no_buckets.q.out +++ b/ql/src/test/results/clientpositive/llap/acid_no_buckets.q.out @@ -1144,8 +1144,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false 
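These golden-file updates follow from the Vectorizer changes above: LLAP no longer strips DECIMAL_64 from the support set, and OrcInputFormat advertises it via getSupportedFeatures(), so the explain output now reports the feature both as offered and as in use. Roughly, featureSupportInUse is the advertised set filtered by hive.vectorized.input.format.supports.enabled; a simplified sketch (variable names hypothetical, not the actual Vectorizer code):

  // featureSupportInUse ~= inputFormatFeatureSupport intersected with the enabled set
  EnumSet<Support> inUse = EnumSet.noneOf(Support.class);
  for (Support s : inputFormatFeatureSupport) {
    if (enabledSupportSet.contains(s)) { // e.g. conf value "decimal_64"
      inUse.add(s);
    }
  }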
@@ -1342,8 +1342,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1511,8 +1511,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1541,8 +1541,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1929,8 +1929,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2128,8 +2128,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2298,8 +2298,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2328,8 +2328,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/acid_vectorization_original.q.out b/ql/src/test/results/clientpositive/llap/acid_vectorization_original.q.out index 957dfd8..66bb2db 100644 --- a/ql/src/test/results/clientpositive/llap/acid_vectorization_original.q.out +++ b/ql/src/test/results/clientpositive/llap/acid_vectorization_original.q.out @@ -665,22 +665,22 @@ STAGE PLANS: Map Operator Tree: TableScan alias: over10k_orc_bucketed - Statistics: Num rows: 1247 Data size: 713720 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1237 Data size: 707880 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: ROW__ID (type: struct) outputColumnNames: ROW__ID - Statistics: Num rows: 1247 Data size: 713720 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1237 Data 
size: 707880 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: count() keys: ROW__ID (type: struct) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 623 Data size: 52332 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 618 Data size: 51912 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: struct) sort order: + Map-reduce partition columns: _col0 (type: struct) - Statistics: Num rows: 623 Data size: 52332 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 618 Data size: 51912 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: bigint) Execution mode: llap LLAP IO: may be used (ACID table) @@ -692,13 +692,13 @@ STAGE PLANS: keys: KEY._col0 (type: struct) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 623 Data size: 52332 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 618 Data size: 51912 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (_col1 > 1L) (type: boolean) - Statistics: Num rows: 207 Data size: 17388 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 206 Data size: 17304 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 207 Data size: 17388 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 206 Data size: 17304 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/llap/enforce_constraint_notnull.q.out b/ql/src/test/results/clientpositive/llap/enforce_constraint_notnull.q.out index 84477c3..7a880dd 100644 --- a/ql/src/test/results/clientpositive/llap/enforce_constraint_notnull.q.out +++ b/ql/src/test/results/clientpositive/llap/enforce_constraint_notnull.q.out @@ -3233,19 +3233,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: acid_uami_n1 - Statistics: Num rows: 267 Data size: 83640 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 281 Data size: 87904 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (((de = 109.23) or (de = 119.23)) and enforce_constraint(vc is not null)) (type: boolean) - Statistics: Num rows: 5 Data size: 1566 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1564 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ROW__ID (type: struct), i (type: int), vc (type: varchar(128)) outputColumnNames: _col0, _col1, _col3 - Statistics: Num rows: 5 Data size: 1566 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1564 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: struct) sort order: + Map-reduce partition columns: UDFToInteger(_col0) (type: int) - Statistics: Num rows: 5 Data size: 1566 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1564 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: int), _col3 (type: varchar(128)) Execution mode: vectorized, llap LLAP IO: may be used (ACID table) @@ -3255,10 +3255,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: struct), VALUE._col0 (type: int), 3.14 (type: decimal(5,2)), VALUE._col1 (type: varchar(128)) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 5 Data size: 1566 Basic stats: 
COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1564 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 5 Data size: 1566 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1564 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat @@ -3326,7 +3326,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: acid_uami_n1 - Statistics: Num rows: 305 Data size: 95448 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 320 Data size: 100040 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((de = 3.14) and enforce_constraint((i is not null and vc is not null))) (type: boolean) Statistics: Num rows: 2 Data size: 625 Basic stats: COMPLETE Column stats: NONE diff --git a/ql/src/test/results/clientpositive/llap/llap_acid.q.out b/ql/src/test/results/clientpositive/llap/llap_acid.q.out index 6196efe..635f928 100644 --- a/ql/src/test/results/clientpositive/llap/llap_acid.q.out +++ b/ql/src/test/results/clientpositive/llap/llap_acid.q.out @@ -124,8 +124,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -269,8 +269,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -378,8 +378,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/llap_acid2.q.out b/ql/src/test/results/clientpositive/llap/llap_acid2.q.out index 4d74a17..c3e9c2a 100644 --- a/ql/src/test/results/clientpositive/llap/llap_acid2.q.out +++ b/ql/src/test/results/clientpositive/llap/llap_acid2.q.out @@ -16,8 +16,10 @@ PREHOOK: query: CREATE TABLE orc_llap_n2 ( cfloat1 FLOAT, cdouble1 DOUBLE, cstring1 string, - cfloat2 float -) stored as orc TBLPROPERTIES ('transactional'='true') + cfloat2 float, + cdecimal1 decimal(10,3), + cdecimal2 decimal(38,10) +) stored as orc TBLPROPERTIES ('transactional'='true','orc.write.format'='UNSTABLE-PRE-2.0') PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@orc_llap_n2 @@ -35,8 +37,10 @@ POSTHOOK: query: CREATE TABLE orc_llap_n2 ( cfloat1 FLOAT, cdouble1 DOUBLE, cstring1 string, - cfloat2 float -) stored as orc TBLPROPERTIES ('transactional'='true') + cfloat2 float, + cdecimal1 decimal(10,3), + cdecimal2 decimal(38,10) +) stored as orc TBLPROPERTIES ('transactional'='true','orc.write.format'='UNSTABLE-PRE-2.0') POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orc_llap_n2 @@ -44,7 +48,8 @@ PREHOOK: 
query: insert into table orc_llap_n2 select cint, cbigint, cfloat, cdouble, cint as c1, cbigint as c2, cfloat as c3, cdouble as c4, cint as c8, cbigint as c7, cfloat as c6, cdouble as c5, - cstring1, cfloat as c9 from alltypesorc order by cdouble asc limit 30 + cstring1, cfloat as c9, cast("1.123" as decimal(10,3))as c10, + cast("1.123456789" as decimal(38,18)) as c11 from alltypesorc order by cdouble asc limit 30 PREHOOK: type: QUERY PREHOOK: Input: default@alltypesorc PREHOOK: Output: default@orc_llap_n2 @@ -52,13 +57,58 @@ POSTHOOK: query: insert into table orc_llap_n2 select cint, cbigint, cfloat, cdouble, cint as c1, cbigint as c2, cfloat as c3, cdouble as c4, cint as c8, cbigint as c7, cfloat as c6, cdouble as c5, - cstring1, cfloat as c9 from alltypesorc order by cdouble asc limit 30 + cstring1, cfloat as c9, cast("1.123" as decimal(10,3))as c10, + cast("1.123456789" as decimal(38,18)) as c11 from alltypesorc order by cdouble asc limit 30 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc POSTHOOK: Output: default@orc_llap_n2 POSTHOOK: Lineage: orc_llap_n2.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] POSTHOOK: Lineage: orc_llap_n2.cbigint0 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] POSTHOOK: Lineage: orc_llap_n2.cbigint1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cdecimal1 SIMPLE [] +POSTHOOK: Lineage: orc_llap_n2.cdecimal2 EXPRESSION [] +POSTHOOK: Lineage: orc_llap_n2.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cdouble0 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cdouble1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cfloat0 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cfloat1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cfloat2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cint0 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cint1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +PREHOOK: query: alter table orc_llap_n2 set TBLPROPERTIES ('transactional'='true','orc.write.format'='0.12') +PREHOOK: type: ALTERTABLE_PROPERTIES +PREHOOK: Input: default@orc_llap_n2 +PREHOOK: Output: default@orc_llap_n2 +POSTHOOK: query: alter table orc_llap_n2 set TBLPROPERTIES ('transactional'='true','orc.write.format'='0.12') +POSTHOOK: type: ALTERTABLE_PROPERTIES +POSTHOOK: Input: default@orc_llap_n2 +POSTHOOK: Output: default@orc_llap_n2 +PREHOOK: query: insert into table orc_llap_n2 +select cint, cbigint, cfloat, cdouble, + cint as c1, cbigint as c2, cfloat as c3, cdouble as c4, + cint as c8, 
cbigint as c7, cfloat as c6, cdouble as c5, + cstring1, cfloat as c9, cast("3.321" as decimal(10,3))as c10, + cast("9.987654321" as decimal(38,18)) as c11 from alltypesorc order by cdouble asc limit 30 +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: default@orc_llap_n2 +POSTHOOK: query: insert into table orc_llap_n2 +select cint, cbigint, cfloat, cdouble, + cint as c1, cbigint as c2, cfloat as c3, cdouble as c4, + cint as c8, cbigint as c7, cfloat as c6, cdouble as c5, + cstring1, cfloat as c9, cast("3.321" as decimal(10,3))as c10, + cast("9.987654321" as decimal(38,18)) as c11 from alltypesorc order by cdouble asc limit 30 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: default@orc_llap_n2 +POSTHOOK: Lineage: orc_llap_n2.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cbigint0 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cbigint1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cdecimal1 SIMPLE [] +POSTHOOK: Lineage: orc_llap_n2.cdecimal2 EXPRESSION [] POSTHOOK: Lineage: orc_llap_n2.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] POSTHOOK: Lineage: orc_llap_n2.cdouble0 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] POSTHOOK: Lineage: orc_llap_n2.cdouble1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] @@ -84,8 +134,10 @@ PREHOOK: query: CREATE TABLE orc_llap2 ( cfloat1 FLOAT, cdouble1 DOUBLE, cstring1 string, - cfloat2 float -) stored as orc TBLPROPERTIES ('transactional'='false') + cfloat2 float, + cdecimal1 decimal(10,3), + cdecimal2 decimal(38,10) +) stored as orc TBLPROPERTIES ('transactional'='false', 'orc.write.format'='UNSTABLE-PRE-2.0') PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@orc_llap2 @@ -103,8 +155,10 @@ POSTHOOK: query: CREATE TABLE orc_llap2 ( cfloat1 FLOAT, cdouble1 DOUBLE, cstring1 string, - cfloat2 float -) stored as orc TBLPROPERTIES ('transactional'='false') + cfloat2 float, + cdecimal1 decimal(10,3), + cdecimal2 decimal(38,10) +) stored as orc TBLPROPERTIES ('transactional'='false', 'orc.write.format'='UNSTABLE-PRE-2.0') POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orc_llap2 @@ -112,7 +166,8 @@ PREHOOK: query: insert into table orc_llap2 select cint, cbigint, cfloat, cdouble, cint as c1, cbigint as c2, cfloat as c3, cdouble as c4, cint as c8, cbigint as c7, cfloat as c6, cdouble as c5, - cstring1, cfloat as c9 from alltypesorc order by cdouble asc limit 30 + cstring1, cfloat as c9, cast("1.123" as decimal(10,3))as c10, + cast("1.123456789" as decimal(38,18)) as c11 from alltypesorc order by cdouble asc limit 30 PREHOOK: type: QUERY PREHOOK: Input: default@alltypesorc PREHOOK: Output: default@orc_llap2 @@ -120,13 +175,16 @@ POSTHOOK: query: insert into table orc_llap2 select cint, cbigint, cfloat, cdouble, cint as c1, cbigint as c2, cfloat as c3, cdouble as c4, cint as c8, cbigint as c7, cfloat as c6, cdouble as c5, - cstring1, cfloat as c9 from alltypesorc order by cdouble asc limit 30 + cstring1, cfloat as c9, cast("1.123" as decimal(10,3))as c10, + cast("1.123456789" as decimal(38,18)) as c11 from alltypesorc order by cdouble asc limit 30 POSTHOOK: type: QUERY 
POSTHOOK: Input: default@alltypesorc POSTHOOK: Output: default@orc_llap2 POSTHOOK: Lineage: orc_llap2.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] POSTHOOK: Lineage: orc_llap2.cbigint0 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] POSTHOOK: Lineage: orc_llap2.cbigint1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: orc_llap2.cdecimal1 SIMPLE [] +POSTHOOK: Lineage: orc_llap2.cdecimal2 EXPRESSION [] POSTHOOK: Lineage: orc_llap2.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] POSTHOOK: Lineage: orc_llap2.cdouble0 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] POSTHOOK: Lineage: orc_llap2.cdouble1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] @@ -138,19 +196,21 @@ POSTHOOK: Lineage: orc_llap2.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(n POSTHOOK: Lineage: orc_llap2.cint0 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] POSTHOOK: Lineage: orc_llap2.cint1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] POSTHOOK: Lineage: orc_llap2.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] -PREHOOK: query: alter table orc_llap2 set TBLPROPERTIES ('transactional'='true') +PREHOOK: query: alter table orc_llap2 set TBLPROPERTIES ('transactional'='true','orc.write.format'='0.12') PREHOOK: type: ALTERTABLE_PROPERTIES PREHOOK: Input: default@orc_llap2 PREHOOK: Output: default@orc_llap2 -POSTHOOK: query: alter table orc_llap2 set TBLPROPERTIES ('transactional'='true') +POSTHOOK: query: alter table orc_llap2 set TBLPROPERTIES ('transactional'='true','orc.write.format'='0.12') POSTHOOK: type: ALTERTABLE_PROPERTIES POSTHOOK: Input: default@orc_llap2 POSTHOOK: Output: default@orc_llap2 -PREHOOK: query: update orc_llap2 set cstring1 = 'testvalue' where cstring1 = 'N016jPED08o' +PREHOOK: query: update orc_llap2 set cstring1 = 'testvalue', cdecimal1 = cast("3.321" as decimal(10,3)), +cdecimal2 = cast("9.987654321" as decimal(38,18)) where cstring1 = 'N016jPED08o' PREHOOK: type: QUERY PREHOOK: Input: default@orc_llap2 PREHOOK: Output: default@orc_llap2 -POSTHOOK: query: update orc_llap2 set cstring1 = 'testvalue' where cstring1 = 'N016jPED08o' +POSTHOOK: query: update orc_llap2 set cstring1 = 'testvalue', cdecimal1 = cast("3.321" as decimal(10,3)), +cdecimal2 = cast("9.987654321" as decimal(38,18)) where cstring1 = 'N016jPED08o' POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_llap2 POSTHOOK: Output: default@orc_llap2 @@ -192,6 +252,36 @@ xTlDv24JYv4s 7wH3hBKdO55Xq3gEEe0 5QLs0LVK1g ET3d4F2I4lV +N016jPED08o +Q1JAdUlCVORmR0Q5X5Vf5u6 +eNsh5tYa +5j7GJ8OCXgMVIcK7 +uJGHsW3cd073NGFITyQ +G1u0pUmU6ehCm +mk6lShdOa8kXT8i7mLd3fK +u5C7glqT5XqtO0JE2686lk1 +h4omSc1jcLLwW +tFY2ng51v +vmAT10eeE47fgH20pLi +uN803aW +qqbDw46IgGds4 +32v414p63Jv1B4tO1xy +73xdw4X +d3o1712a03n20qvi62U7 +eQ80MW0h728I204P87YXc +KHtD2A2hp6OjFgS73gdgE +nI30tm7U55O0gI +LSJtFA66 +mby00c +meGb5 +pM6Gt05s1YJeii +LR2AKy0dPt8vFdIV5760jriw +1B3WMD5LSk65B2Moa +xTlDv24JYv4s +28Oe6r21yux7Lk47 +7wH3hBKdO55Xq3gEEe0 +5QLs0LVK1g +ET3d4F2I4lV PREHOOK: query: select cfloat2, cint from orc_llap_n2 PREHOOK: type: QUERY PREHOOK: Input: default@orc_llap_n2 @@ -230,6 +320,36 @@ NULL -899422227 11.0 385623629 11.0 681126962 11.0 25892751 +NULL -838810013 +NULL 246423894 +NULL 
708885482 +NULL 186967185 +NULL -595277064 +NULL 584923170 +NULL 518213127 +NULL -334595454 +NULL 241008004 +NULL 185212032 +NULL -738747840 +NULL -971543377 +NULL 940448896 +NULL -324030556 +NULL -899422227 +11.0 835111400 +11.0 -775326158 +11.0 653630202 +11.0 779427499 +11.0 797003983 +11.0 31832752 +11.0 783790031 +11.0 -898241885 +11.0 NULL +11.0 -646295381 +11.0 130912195 +11.0 -391573084 +11.0 385623629 +11.0 681126962 +11.0 25892751 PREHOOK: query: select * from orc_llap_n2 PREHOOK: type: QUERY PREHOOK: Input: default@orc_llap_n2 @@ -238,36 +358,66 @@ POSTHOOK: query: select * from orc_llap_n2 POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_llap_n2 #### A masked pattern was here #### --838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL N016jPED08o NULL -246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL Q1JAdUlCVORmR0Q5X5Vf5u6 NULL -708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL eNsh5tYa NULL -186967185 -1645852809 NULL NULL 186967185 -1645852809 NULL NULL 186967185 -1645852809 NULL NULL 5j7GJ8OCXgMVIcK7 NULL --595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL uJGHsW3cd073NGFITyQ NULL -584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL G1u0pUmU6ehCm NULL -518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL mk6lShdOa8kXT8i7mLd3fK NULL --334595454 -1645852809 NULL NULL -334595454 -1645852809 NULL NULL -334595454 -1645852809 NULL NULL u5C7glqT5XqtO0JE2686lk1 NULL -241008004 -1645852809 NULL NULL 241008004 -1645852809 NULL NULL 241008004 -1645852809 NULL NULL h4omSc1jcLLwW NULL -185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL tFY2ng51v NULL --738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL vmAT10eeE47fgH20pLi NULL --971543377 -1645852809 NULL NULL -971543377 -1645852809 NULL NULL -971543377 -1645852809 NULL NULL uN803aW NULL -940448896 -1645852809 NULL NULL 940448896 -1645852809 NULL NULL 940448896 -1645852809 NULL NULL qqbDw46IgGds4 NULL --324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL 32v414p63Jv1B4tO1xy NULL --899422227 -1645852809 NULL NULL -899422227 -1645852809 NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X NULL -835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL d3o1712a03n20qvi62U7 11.0 --775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL eQ80MW0h728I204P87YXc 11.0 -653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL KHtD2A2hp6OjFgS73gdgE 11.0 -779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL nI30tm7U55O0gI 11.0 -797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL LSJtFA66 11.0 -31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL mby00c 11.0 -783790031 -1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL meGb5 11.0 --898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL pM6Gt05s1YJeii 11.0 -NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL LR2AKy0dPt8vFdIV5760jriw 11.0 --646295381 -1654635859 11.0 
NULL -646295381 -1654635859 11.0 NULL -646295381 -1654635859 11.0 NULL 1B3WMD5LSk65B2Moa 11.0 -130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL xTlDv24JYv4s 11.0 --391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL 28Oe6r21yux7Lk47 11.0 -385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 7wH3hBKdO55Xq3gEEe0 11.0 -681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 5QLs0LVK1g 11.0 -25892751 -1978674520 11.0 NULL 25892751 -1978674520 11.0 NULL 25892751 -1978674520 11.0 NULL ET3d4F2I4lV 11.0 +-838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL N016jPED08o NULL 3.321 9.9876543210 +246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL Q1JAdUlCVORmR0Q5X5Vf5u6 NULL 3.321 9.9876543210 +708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL eNsh5tYa NULL 3.321 9.9876543210 +186967185 -1645852809 NULL NULL 186967185 -1645852809 NULL NULL 186967185 -1645852809 NULL NULL 5j7GJ8OCXgMVIcK7 NULL 3.321 9.9876543210 +-595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL uJGHsW3cd073NGFITyQ NULL 3.321 9.9876543210 +584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL G1u0pUmU6ehCm NULL 3.321 9.9876543210 +518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL mk6lShdOa8kXT8i7mLd3fK NULL 3.321 9.9876543210 +-334595454 -1645852809 NULL NULL -334595454 -1645852809 NULL NULL -334595454 -1645852809 NULL NULL u5C7glqT5XqtO0JE2686lk1 NULL 3.321 9.9876543210 +241008004 -1645852809 NULL NULL 241008004 -1645852809 NULL NULL 241008004 -1645852809 NULL NULL h4omSc1jcLLwW NULL 3.321 9.9876543210 +185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL tFY2ng51v NULL 3.321 9.9876543210 +-738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL vmAT10eeE47fgH20pLi NULL 3.321 9.9876543210 +-971543377 -1645852809 NULL NULL -971543377 -1645852809 NULL NULL -971543377 -1645852809 NULL NULL uN803aW NULL 3.321 9.9876543210 +940448896 -1645852809 NULL NULL 940448896 -1645852809 NULL NULL 940448896 -1645852809 NULL NULL qqbDw46IgGds4 NULL 3.321 9.9876543210 +-324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL 32v414p63Jv1B4tO1xy NULL 3.321 9.9876543210 +-899422227 -1645852809 NULL NULL -899422227 -1645852809 NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X NULL 3.321 9.9876543210 +835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL d3o1712a03n20qvi62U7 11.0 3.321 9.9876543210 +-775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL eQ80MW0h728I204P87YXc 11.0 3.321 9.9876543210 +653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL KHtD2A2hp6OjFgS73gdgE 11.0 3.321 9.9876543210 +779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL nI30tm7U55O0gI 11.0 3.321 9.9876543210 +797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL LSJtFA66 11.0 3.321 9.9876543210 +31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL mby00c 11.0 3.321 9.9876543210 +783790031 
-1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL meGb5 11.0 3.321 9.9876543210 +-898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL pM6Gt05s1YJeii 11.0 3.321 9.9876543210 +NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL LR2AKy0dPt8vFdIV5760jriw 11.0 3.321 9.9876543210 +-646295381 -1654635859 11.0 NULL -646295381 -1654635859 11.0 NULL -646295381 -1654635859 11.0 NULL 1B3WMD5LSk65B2Moa 11.0 3.321 9.9876543210 +130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL xTlDv24JYv4s 11.0 3.321 9.9876543210 +-391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL 28Oe6r21yux7Lk47 11.0 3.321 9.9876543210 +385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 7wH3hBKdO55Xq3gEEe0 11.0 3.321 9.9876543210 +681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 5QLs0LVK1g 11.0 3.321 9.9876543210 +25892751 -1978674520 11.0 NULL 25892751 -1978674520 11.0 NULL 25892751 -1978674520 11.0 NULL ET3d4F2I4lV 11.0 3.321 9.9876543210 +-838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL N016jPED08o NULL 1.123 1.1234567890 +246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL Q1JAdUlCVORmR0Q5X5Vf5u6 NULL 1.123 1.1234567890 +708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL eNsh5tYa NULL 1.123 1.1234567890 +186967185 -1645852809 NULL NULL 186967185 -1645852809 NULL NULL 186967185 -1645852809 NULL NULL 5j7GJ8OCXgMVIcK7 NULL 1.123 1.1234567890 +-595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL uJGHsW3cd073NGFITyQ NULL 1.123 1.1234567890 +584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL G1u0pUmU6ehCm NULL 1.123 1.1234567890 +518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL mk6lShdOa8kXT8i7mLd3fK NULL 1.123 1.1234567890 +-334595454 -1645852809 NULL NULL -334595454 -1645852809 NULL NULL -334595454 -1645852809 NULL NULL u5C7glqT5XqtO0JE2686lk1 NULL 1.123 1.1234567890 +241008004 -1645852809 NULL NULL 241008004 -1645852809 NULL NULL 241008004 -1645852809 NULL NULL h4omSc1jcLLwW NULL 1.123 1.1234567890 +185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL tFY2ng51v NULL 1.123 1.1234567890 +-738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL vmAT10eeE47fgH20pLi NULL 1.123 1.1234567890 +-971543377 -1645852809 NULL NULL -971543377 -1645852809 NULL NULL -971543377 -1645852809 NULL NULL uN803aW NULL 1.123 1.1234567890 +940448896 -1645852809 NULL NULL 940448896 -1645852809 NULL NULL 940448896 -1645852809 NULL NULL qqbDw46IgGds4 NULL 1.123 1.1234567890 +-324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL 32v414p63Jv1B4tO1xy NULL 1.123 1.1234567890 +-899422227 -1645852809 NULL NULL -899422227 -1645852809 NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X NULL 1.123 1.1234567890 +835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL d3o1712a03n20qvi62U7 11.0 1.123 1.1234567890 +-775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL eQ80MW0h728I204P87YXc 11.0 1.123 1.1234567890 
+653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL KHtD2A2hp6OjFgS73gdgE 11.0 1.123 1.1234567890 +779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL nI30tm7U55O0gI 11.0 1.123 1.1234567890 +797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL LSJtFA66 11.0 1.123 1.1234567890 +31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL mby00c 11.0 1.123 1.1234567890 +783790031 -1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL meGb5 11.0 1.123 1.1234567890 +-898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL pM6Gt05s1YJeii 11.0 1.123 1.1234567890 +NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL LR2AKy0dPt8vFdIV5760jriw 11.0 1.123 1.1234567890 +-646295381 -1654635859 11.0 NULL -646295381 -1654635859 11.0 NULL -646295381 -1654635859 11.0 NULL 1B3WMD5LSk65B2Moa 11.0 1.123 1.1234567890 +130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL xTlDv24JYv4s 11.0 1.123 1.1234567890 +-391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL 28Oe6r21yux7Lk47 11.0 1.123 1.1234567890 +385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 7wH3hBKdO55Xq3gEEe0 11.0 1.123 1.1234567890 +681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 5QLs0LVK1g 11.0 1.123 1.1234567890 +25892751 -1978674520 11.0 NULL 25892751 -1978674520 11.0 NULL 25892751 -1978674520 11.0 NULL ET3d4F2I4lV 11.0 1.123 1.1234567890 PREHOOK: query: select cstring1 from orc_llap2 PREHOOK: type: QUERY PREHOOK: Input: default@orc_llap2 @@ -352,36 +502,36 @@ POSTHOOK: query: select * from orc_llap2 POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_llap2 #### A masked pattern was here #### -246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL Q1JAdUlCVORmR0Q5X5Vf5u6 NULL -708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL eNsh5tYa NULL -186967185 -1645852809 NULL NULL 186967185 -1645852809 NULL NULL 186967185 -1645852809 NULL NULL 5j7GJ8OCXgMVIcK7 NULL --595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL uJGHsW3cd073NGFITyQ NULL -584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL G1u0pUmU6ehCm NULL -518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL mk6lShdOa8kXT8i7mLd3fK NULL --334595454 -1645852809 NULL NULL -334595454 -1645852809 NULL NULL -334595454 -1645852809 NULL NULL u5C7glqT5XqtO0JE2686lk1 NULL -241008004 -1645852809 NULL NULL 241008004 -1645852809 NULL NULL 241008004 -1645852809 NULL NULL h4omSc1jcLLwW NULL -185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL tFY2ng51v NULL --738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL vmAT10eeE47fgH20pLi NULL --971543377 -1645852809 NULL NULL -971543377 -1645852809 NULL NULL -971543377 -1645852809 NULL NULL uN803aW NULL -940448896 -1645852809 NULL NULL 940448896 -1645852809 NULL NULL 940448896 -1645852809 NULL NULL qqbDw46IgGds4 NULL --324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL 32v414p63Jv1B4tO1xy NULL --899422227 
-1645852809 NULL NULL -899422227 -1645852809 NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X NULL -835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL d3o1712a03n20qvi62U7 11.0 --775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL eQ80MW0h728I204P87YXc 11.0 -653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL KHtD2A2hp6OjFgS73gdgE 11.0 -779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL nI30tm7U55O0gI 11.0 -797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL LSJtFA66 11.0 -31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL mby00c 11.0 -783790031 -1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL meGb5 11.0 --898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL pM6Gt05s1YJeii 11.0 -NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL LR2AKy0dPt8vFdIV5760jriw 11.0 --646295381 -1654635859 11.0 NULL -646295381 -1654635859 11.0 NULL -646295381 -1654635859 11.0 NULL 1B3WMD5LSk65B2Moa 11.0 -130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL xTlDv24JYv4s 11.0 --391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL 28Oe6r21yux7Lk47 11.0 -385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 7wH3hBKdO55Xq3gEEe0 11.0 -681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 5QLs0LVK1g 11.0 -25892751 -1978674520 11.0 NULL 25892751 -1978674520 11.0 NULL 25892751 -1978674520 11.0 NULL ET3d4F2I4lV 11.0 --838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL testvalue NULL +246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL Q1JAdUlCVORmR0Q5X5Vf5u6 NULL 1.123 1.1234567890 +708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL eNsh5tYa NULL 1.123 1.1234567890 +186967185 -1645852809 NULL NULL 186967185 -1645852809 NULL NULL 186967185 -1645852809 NULL NULL 5j7GJ8OCXgMVIcK7 NULL 1.123 1.1234567890 +-595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL uJGHsW3cd073NGFITyQ NULL 1.123 1.1234567890 +584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL G1u0pUmU6ehCm NULL 1.123 1.1234567890 +518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL mk6lShdOa8kXT8i7mLd3fK NULL 1.123 1.1234567890 +-334595454 -1645852809 NULL NULL -334595454 -1645852809 NULL NULL -334595454 -1645852809 NULL NULL u5C7glqT5XqtO0JE2686lk1 NULL 1.123 1.1234567890 +241008004 -1645852809 NULL NULL 241008004 -1645852809 NULL NULL 241008004 -1645852809 NULL NULL h4omSc1jcLLwW NULL 1.123 1.1234567890 +185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL tFY2ng51v NULL 1.123 1.1234567890 +-738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL vmAT10eeE47fgH20pLi NULL 1.123 1.1234567890 +-971543377 -1645852809 NULL NULL -971543377 -1645852809 NULL NULL -971543377 -1645852809 NULL NULL uN803aW NULL 1.123 1.1234567890 +940448896 -1645852809 NULL NULL 940448896 -1645852809 NULL NULL 940448896 -1645852809 NULL 
NULL qqbDw46IgGds4 NULL 1.123 1.1234567890 +-324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL 32v414p63Jv1B4tO1xy NULL 1.123 1.1234567890 +-899422227 -1645852809 NULL NULL -899422227 -1645852809 NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X NULL 1.123 1.1234567890 +835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL d3o1712a03n20qvi62U7 11.0 1.123 1.1234567890 +-775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL eQ80MW0h728I204P87YXc 11.0 1.123 1.1234567890 +653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL KHtD2A2hp6OjFgS73gdgE 11.0 1.123 1.1234567890 +779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL nI30tm7U55O0gI 11.0 1.123 1.1234567890 +797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL LSJtFA66 11.0 1.123 1.1234567890 +31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL mby00c 11.0 1.123 1.1234567890 +783790031 -1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL meGb5 11.0 1.123 1.1234567890 +-898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL pM6Gt05s1YJeii 11.0 1.123 1.1234567890 +NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL LR2AKy0dPt8vFdIV5760jriw 11.0 1.123 1.1234567890 +-646295381 -1654635859 11.0 NULL -646295381 -1654635859 11.0 NULL -646295381 -1654635859 11.0 NULL 1B3WMD5LSk65B2Moa 11.0 1.123 1.1234567890 +130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL xTlDv24JYv4s 11.0 1.123 1.1234567890 +-391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL 28Oe6r21yux7Lk47 11.0 1.123 1.1234567890 +385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 7wH3hBKdO55Xq3gEEe0 11.0 1.123 1.1234567890 +681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 5QLs0LVK1g 11.0 1.123 1.1234567890 +25892751 -1978674520 11.0 NULL 25892751 -1978674520 11.0 NULL 25892751 -1978674520 11.0 NULL ET3d4F2I4lV 11.0 1.123 1.1234567890 +-838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL testvalue NULL 3.321 9.9876543210 PREHOOK: query: DROP TABLE orc_llap_n2 PREHOOK: type: DROPTABLE PREHOOK: Input: default@orc_llap_n2 diff --git a/ql/src/test/results/clientpositive/llap/llap_acid_fast.q.out b/ql/src/test/results/clientpositive/llap/llap_acid_fast.q.out index 37c213b..c4dc6f7 100644 --- a/ql/src/test/results/clientpositive/llap/llap_acid_fast.q.out +++ b/ql/src/test/results/clientpositive/llap/llap_acid_fast.q.out @@ -118,8 +118,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -263,8 +263,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -372,8 +372,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/llap_decimal64_reader.q.out b/ql/src/test/results/clientpositive/llap/llap_decimal64_reader.q.out new file mode 100644 index 0000000..0041206 --- /dev/null +++ b/ql/src/test/results/clientpositive/llap/llap_decimal64_reader.q.out @@ -0,0 +1,303 @@ +PREHOOK: query: DROP TABLE orc_llap_n0 +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE orc_llap_n0 +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE orc_llap_n0( + ctinyint TINYINT, + csmallint SMALLINT, + cint INT, + cbigint BIGINT, + cfloat FLOAT, + cdouble DOUBLE, + cstring1 STRING, + cstring2 STRING, + ctimestamp1 TIMESTAMP, + ctimestamp2 TIMESTAMP, + cboolean1 BOOLEAN, + cboolean2 BOOLEAN, + cdecimal1 decimal(10,2), + cdecimal2 decimal(38,5)) + STORED AS ORC tblproperties ("orc.compress"="NONE") +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@orc_llap_n0 +POSTHOOK: query: CREATE TABLE orc_llap_n0( + ctinyint TINYINT, + csmallint SMALLINT, + cint INT, + cbigint BIGINT, + cfloat FLOAT, + cdouble DOUBLE, + cstring1 STRING, + cstring2 STRING, + ctimestamp1 TIMESTAMP, + ctimestamp2 TIMESTAMP, + cboolean1 BOOLEAN, + cboolean2 BOOLEAN, + cdecimal1 decimal(10,2), + cdecimal2 decimal(38,5)) + STORED AS ORC tblproperties ("orc.compress"="NONE") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@orc_llap_n0 +PREHOOK: query: insert into table orc_llap_n0 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2, + cast("3.345" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: default@orc_llap_n0 +POSTHOOK: query: insert into table orc_llap_n0 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2, + cast("3.345" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: default@orc_llap_n0 +POSTHOOK: Lineage: orc_llap_n0.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cboolean1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cboolean2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cdecimal1 SIMPLE [] +POSTHOOK: Lineage: orc_llap_n0.cdecimal2 SIMPLE [] +POSTHOOK: Lineage: orc_llap_n0.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.csmallint SIMPLE 
[(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctimestamp1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctimestamp2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ] +PREHOOK: query: alter table orc_llap_n0 set tblproperties ("orc.compress"="NONE", 'orc.write.format'='UNSTABLE-PRE-2.0') +PREHOOK: type: ALTERTABLE_PROPERTIES +PREHOOK: Input: default@orc_llap_n0 +PREHOOK: Output: default@orc_llap_n0 +POSTHOOK: query: alter table orc_llap_n0 set tblproperties ("orc.compress"="NONE", 'orc.write.format'='UNSTABLE-PRE-2.0') +POSTHOOK: type: ALTERTABLE_PROPERTIES +POSTHOOK: Input: default@orc_llap_n0 +POSTHOOK: Output: default@orc_llap_n0 +PREHOOK: query: insert into table orc_llap_n0 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2, + cast("4.456" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: default@orc_llap_n0 +POSTHOOK: query: insert into table orc_llap_n0 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2, + cast("4.456" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: default@orc_llap_n0 +POSTHOOK: Lineage: orc_llap_n0.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cboolean1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cboolean2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cdecimal1 SIMPLE [] +POSTHOOK: Lineage: orc_llap_n0.cdecimal2 SIMPLE [] +POSTHOOK: Lineage: orc_llap_n0.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.csmallint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctimestamp1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctimestamp2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctinyint SIMPLE 
[(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ] +PREHOOK: query: explain vectorization select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: orc_llap_n0 + filterExpr: ((cdecimal1 = 3.35) or (cdecimal1 = 4.46)) (type: boolean) + Statistics: Num rows: 24576 Data size: 5505024 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: ((cdecimal1 = 3.35) or (cdecimal1 = 4.46)) (type: boolean) + Statistics: Num rows: 24576 Data size: 5505024 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + keys: cdecimal1 (type: decimal(10,2)), cdecimal2 (type: decimal(38,5)) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: decimal(10,2)), _col1 (type: decimal(38,5)) + sort order: ++ + Map-reduce partition columns: _col0 (type: decimal(10,2)), _col1 (type: decimal(38,5)) + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + TopN Hash Memory Usage: 0.1 + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + keys: KEY._col0 (type: decimal(10,2)), KEY._col1 (type: decimal(38,5)) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + Limit + Number of rows: 2 + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 2 + Processor Tree: + ListSink + +PREHOOK: query: select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2 +PREHOOK: type: QUERY 
+PREHOOK: Input: default@orc_llap_n0 +#### A masked pattern was here #### +POSTHOOK: query: select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@orc_llap_n0 +#### A masked pattern was here #### +4.46 5.56789 +3.35 5.56789 +PREHOOK: query: explain vectorization select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: orc_llap_n0 + filterExpr: ((cdecimal1 = 3.35) or (cdecimal1 = 4.46)) (type: boolean) + Statistics: Num rows: 24576 Data size: 5505024 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: ((cdecimal1 = 3.35) or (cdecimal1 = 4.46)) (type: boolean) + Statistics: Num rows: 24576 Data size: 5505024 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + keys: cdecimal1 (type: decimal(10,2)), cdecimal2 (type: decimal(38,5)) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: decimal(10,2)), _col1 (type: decimal(38,5)) + sort order: ++ + Map-reduce partition columns: _col0 (type: decimal(10,2)), _col1 (type: decimal(38,5)) + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + TopN Hash Memory Usage: 0.1 + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + vectorizationSupportRemovedReasons: [[DECIMAL_64] is disabled because it is not in hive.vectorized.input.format.supports.enabled []] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + keys: KEY._col0 (type: decimal(10,2)), KEY._col1 (type: decimal(38,5)) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + Limit + Number of rows: 2 + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat +
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 2 + Processor Tree: + ListSink + +PREHOOK: query: select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2 +PREHOOK: type: QUERY +PREHOOK: Input: default@orc_llap_n0 +#### A masked pattern was here #### +POSTHOOK: query: select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@orc_llap_n0 +#### A masked pattern was here #### +4.46 5.56789 +3.35 5.56789 +PREHOOK: query: DROP TABLE orc_llap_n0 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@orc_llap_n0 +PREHOOK: Output: default@orc_llap_n0 +POSTHOOK: query: DROP TABLE orc_llap_n0 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@orc_llap_n0 +POSTHOOK: Output: default@orc_llap_n0 diff --git a/ql/src/test/results/clientpositive/llap/llap_partitioned.q.out b/ql/src/test/results/clientpositive/llap/llap_partitioned.q.out index faab23c..e6fa1ac 100644 --- a/ql/src/test/results/clientpositive/llap/llap_partitioned.q.out +++ b/ql/src/test/results/clientpositive/llap/llap_partitioned.q.out @@ -1679,8 +1679,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1745,8 +1745,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2127,8 +2127,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: true diff --git a/ql/src/test/results/clientpositive/llap/llap_text.q.out b/ql/src/test/results/clientpositive/llap/llap_text.q.out new file mode 100644 index 0000000..40d08d3 --- /dev/null +++ b/ql/src/test/results/clientpositive/llap/llap_text.q.out @@ -0,0 +1,1082 @@ +PREHOOK: query: DROP TABLE text_llap +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE text_llap +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE text_llap( + ctinyint TINYINT, + csmallint SMALLINT, + cint INT, + cbigint BIGINT, + cfloat FLOAT, + cdouble DOUBLE, + cstring1 STRING, + cstring2 STRING, + ctimestamp1 TIMESTAMP, + ctimestamp2 TIMESTAMP, + cboolean1 BOOLEAN, + cboolean2 BOOLEAN) +row format serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' +stored as inputformat "org.apache.hadoop.mapred.TextInputFormat" + + outputformat "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat" +PREHOOK: type: CREATETABLE +PREHOOK: Output: 
database:default +PREHOOK: Output: default@text_llap +POSTHOOK: query: CREATE TABLE text_llap( + ctinyint TINYINT, + csmallint SMALLINT, + cint INT, + cbigint BIGINT, + cfloat FLOAT, + cdouble DOUBLE, + cstring1 STRING, + cstring2 STRING, + ctimestamp1 TIMESTAMP, + ctimestamp2 TIMESTAMP, + cboolean1 BOOLEAN, + cboolean2 BOOLEAN) +row format serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' +stored as inputformat "org.apache.hadoop.mapred.TextInputFormat" + + outputformat "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat" +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@text_llap +PREHOOK: query: insert into table text_llap +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2 from alltypesorc +where cboolean2 is not null or cstring1 is not null or ctinyint is not null +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: default@text_llap +POSTHOOK: query: insert into table text_llap +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2 from alltypesorc +where cboolean2 is not null or cstring1 is not null or ctinyint is not null +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: default@text_llap +POSTHOOK: Lineage: text_llap.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: text_llap.cboolean1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: text_llap.cboolean2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ] +POSTHOOK: Lineage: text_llap.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: text_llap.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: text_llap.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: text_llap.csmallint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: text_llap.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: text_llap.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ] +POSTHOOK: Lineage: text_llap.ctimestamp1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: text_llap.ctimestamp2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ] +POSTHOOK: Lineage: text_llap.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ] +PREHOOK: query: create table text_llap2( + t tinyint, + si smallint, + i int, + b bigint, + f float, + d double, + bo boolean, + s string, + ts timestamp, + `dec` decimal, + bin binary) +row format delimited fields terminated by '|' +stored as inputformat "org.apache.hadoop.mapred.TextInputFormat" + +outputformat "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat" +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@text_llap2 +POSTHOOK: query: create table text_llap2( + t tinyint, + si smallint, + i int, + b bigint, + f float, + d double, + bo boolean, + s string, + 
ts timestamp,
+ `dec` decimal,
+ bin binary)
+row format delimited fields terminated by '|'
+stored as inputformat "org.apache.hadoop.mapred.TextInputFormat"
+
+outputformat "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@text_llap2
+PREHOOK: query: load data local inpath '../../data/files/over10k.gz' into table text_llap2
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@text_llap2
+POSTHOOK: query: load data local inpath '../../data/files/over10k.gz' into table text_llap2
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@text_llap2
+PREHOOK: query: create table text_llap1 like text_llap
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@text_llap1
+POSTHOOK: query: create table text_llap1 like text_llap
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@text_llap1
+PREHOOK: query: create table text_llap100 like text_llap
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@text_llap100
+POSTHOOK: query: create table text_llap100 like text_llap
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@text_llap100
+PREHOOK: query: create table text_llap1000 like text_llap
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@text_llap1000
+POSTHOOK: query: create table text_llap1000 like text_llap
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@text_llap1000
+PREHOOK: query: insert into table text_llap1
+select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2 from alltypesorc
+where cboolean2 is not null or cstring1 is not null or ctinyint is not null limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+PREHOOK: Output: default@text_llap1
+POSTHOOK: query: insert into table text_llap1
+select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2 from alltypesorc
+where cboolean2 is not null or cstring1 is not null or ctinyint is not null limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+POSTHOOK: Output: default@text_llap1
+POSTHOOK: Lineage: text_llap1.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: text_llap1.cboolean1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: text_llap1.cboolean2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ]
+POSTHOOK: Lineage: text_llap1.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: text_llap1.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: text_llap1.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: text_llap1.csmallint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: text_llap1.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: text_llap1.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ]
+POSTHOOK: Lineage: text_llap1.ctimestamp1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: text_llap1.ctimestamp2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: text_llap1.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ]
+PREHOOK: query: insert into table text_llap100
+select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2 from alltypesorc
+where cboolean2 is not null or cstring1 is not null or ctinyint is not null limit 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+PREHOOK: Output: default@text_llap100
+POSTHOOK: query: insert into table text_llap100
+select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2 from alltypesorc
+where cboolean2 is not null or cstring1 is not null or ctinyint is not null limit 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+POSTHOOK: Output: default@text_llap100
+POSTHOOK: Lineage: text_llap100.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: text_llap100.cboolean1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: text_llap100.cboolean2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ]
+POSTHOOK: Lineage: text_llap100.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: text_llap100.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: text_llap100.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: text_llap100.csmallint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: text_llap100.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: text_llap100.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ]
+POSTHOOK: Lineage: text_llap100.ctimestamp1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: text_llap100.ctimestamp2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: text_llap100.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ]
+PREHOOK: query: insert into table text_llap1000
+select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2 from alltypesorc
+where cboolean2 is not null or cstring1 is not null or ctinyint is not null limit 1000
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+PREHOOK: Output: default@text_llap1000
+POSTHOOK: query: insert into table text_llap1000
+select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2 from alltypesorc
+where cboolean2 is not null or cstring1 is not null or ctinyint is not null limit 1000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+POSTHOOK: Output: default@text_llap1000
+POSTHOOK: Lineage: text_llap1000.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: text_llap1000.cboolean1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: text_llap1000.cboolean2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ]
+POSTHOOK: Lineage: text_llap1000.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: text_llap1000.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: text_llap1000.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: text_llap1000.csmallint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: text_llap1000.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: text_llap1000.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ]
+POSTHOOK: Lineage: text_llap1000.ctimestamp1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: text_llap1000.ctimestamp2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: text_llap1000.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ]
+PREHOOK: query: select t, s, ts from text_llap2 order by t, s, ts limit 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@text_llap2
+#### A masked pattern was here ####
+POSTHOOK: query: select t, s, ts from text_llap2 order by t, s, ts limit 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@text_llap2
+#### A masked pattern was here ####
+-2 alice carson 2013-03-01 09:11:58.703074
+-2 alice nixon 2013-03-01 09:11:58.703321
+-2 alice underhill 2013-03-01 09:11:58.703122
+-2 alice underhill 2013-03-01 09:11:58.703127
+-2 alice xylophone 2013-03-01 09:11:58.703105
+-2 bob falkner 2013-03-01 09:11:58.703071
+-2 bob king 2013-03-01 09:11:58.703236
+-2 bob ovid 2013-03-01 09:11:58.703285
+-2 bob van buren 2013-03-01 09:11:58.703218
+-2 bob xylophone 2013-03-01 09:11:58.703219
+-2 calvin xylophone 2013-03-01 09:11:58.703083
+-2 david falkner 2013-03-01 09:11:58.703254
+-2 david laertes 2013-03-01 09:11:58.703076
+-2 david miller 2013-03-01 09:11:58.703238
+-3 alice allen 2013-03-01 09:11:58.703323
+-3 alice davidson 2013-03-01 09:11:58.703226
+-3 alice falkner 2013-03-01 09:11:58.703304
+-3 alice king 2013-03-01 09:11:58.70314
+-3 alice king 2013-03-01 09:11:58.703247
+-3 alice xylophone 2013-03-01 09:11:58.703129
+-3 bob ellison 2013-03-01 09:11:58.703261
+-3 bob falkner 2013-03-01 09:11:58.70328
+-3 bob ichabod 2013-03-01 09:11:58.70324
+-3 bob johnson 2013-03-01 09:11:58.703204
+-3 bob polk 2013-03-01 09:11:58.703128
+-3 bob underhill 2013-03-01 09:11:58.703176
+-3 bob underhill 2013-03-01 09:11:58.703188
+-3 bob van buren 2013-03-01 09:11:58.703199
+-3 calvin ichabod 2013-03-01 09:11:58.703213
+-3 calvin white 2013-03-01 09:11:58.703295
+-3 david carson 2013-03-01 09:11:58.703136
+-3 david falkner 2013-03-01 09:11:58.703305
+-3 david garcia 2013-03-01 09:11:58.70319
+-3 david hernandez 2013-03-01 09:11:58.703252
+-3 ethan steinbeck 2013-03-01 09:11:58.703079
+-3 ethan underhill 2013-03-01 09:11:58.703138
+-3 fred ellison 2013-03-01 09:11:58.703233
+-3 gabriella brown 2013-03-01 09:11:58.703288
+-3 holly nixon 2013-03-01 09:11:58.703262
+-3 holly polk 2013-03-01 09:11:58.703273
+-3 holly steinbeck 2013-03-01 09:11:58.703242
+-3 holly thompson 2013-03-01 09:11:58.703073
+-3 holly underhill 2013-03-01 09:11:58.703219
+-3 irene ellison 2013-03-01 09:11:58.703092
+-3 irene underhill 2013-03-01 09:11:58.703298
+-3 irene young 2013-03-01 09:11:58.703084
+-3 jessica johnson 2013-03-01 09:11:58.703319
+-3 jessica king 2013-03-01 09:11:58.703279
+-3 jessica miller 2013-03-01 09:11:58.703245
+-3 jessica white 2013-03-01 09:11:58.703199
+-3 katie ichabod 2013-03-01 09:11:58.703139
+-3 luke garcia 2013-03-01 09:11:58.703076
+-3 luke ichabod 2013-03-01 09:11:58.703294
+-3 luke king 2013-03-01 09:11:58.703207
+-3 luke young 2013-03-01 09:11:58.703182
+-3 mike allen 2013-03-01 09:11:58.703292
+-3 mike king 2013-03-01 09:11:58.703214
+-3 mike polk 2013-03-01 09:11:58.70319
+-3 mike white 2013-03-01 09:11:58.703087
+-3 mike xylophone 2013-03-01 09:11:58.703308
+-3 nick nixon 2013-03-01 09:11:58.703083
+-3 nick robinson 2013-03-01 09:11:58.703147
+-3 oscar davidson 2013-03-01 09:11:58.703071
+-3 oscar garcia 2013-03-01 09:11:58.703282
+-3 oscar johnson 2013-03-01 09:11:58.70311
+-3 oscar johnson 2013-03-01 09:11:58.703133
+-3 oscar miller 2013-03-01 09:11:58.70332
+-3 priscilla laertes 2013-03-01 09:11:58.70325
+-3 priscilla quirinius 2013-03-01 09:11:58.703228
+-3 priscilla zipper 2013-03-01 09:11:58.703321
+-3 quinn ellison 2013-03-01 09:11:58.703232
+-3 quinn polk 2013-03-01 09:11:58.703244
+-3 rachel davidson 2013-03-01 09:11:58.703316
+-3 rachel thompson 2013-03-01 09:11:58.703276
+-3 sarah miller 2013-03-01 09:11:58.70316
+-3 sarah robinson 2013-03-01 09:11:58.703288
+-3 sarah xylophone 2013-03-01 09:11:58.703112
+-3 sarah zipper 2013-03-01 09:11:58.703289
+-3 tom hernandez 2013-03-01 09:11:58.703108
+-3 tom hernandez 2013-03-01 09:11:58.703188
+-3 tom polk 2013-03-01 09:11:58.703217
+-3 tom steinbeck 2013-03-01 09:11:58.703251
+-3 ulysses carson 2013-03-01 09:11:58.703253
+-3 ulysses ellison 2013-03-01 09:11:58.703197
+-3 ulysses quirinius 2013-03-01 09:11:58.703189
+-3 ulysses robinson 2013-03-01 09:11:58.703227
+-3 ulysses steinbeck 2013-03-01 09:11:58.703259
+-3 victor allen 2013-03-01 09:11:58.703155
+-3 victor hernandez 2013-03-01 09:11:58.703176
+-3 victor robinson 2013-03-01 09:11:58.703305
+-3 victor thompson 2013-03-01 09:11:58.703299
+-3 victor xylophone 2013-03-01 09:11:58.703135
+-3 wendy quirinius 2013-03-01 09:11:58.703266
+-3 wendy robinson 2013-03-01 09:11:58.703294
+-3 wendy xylophone 2013-03-01 09:11:58.703191
+-3 xavier garcia 2013-03-01 09:11:58.703194
+-3 xavier ovid 2013-03-01 09:11:58.703148
+-3 yuri xylophone 2013-03-01 09:11:58.703258
+-3 zach thompson 2013-03-01 09:11:58.703252
+-3 zach young 2013-03-01 09:11:58.703191
+PREHOOK: query: select * from text_llap2 order by t, s, ts limit 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@text_llap2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from text_llap2 order by t, s, ts limit 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@text_llap2
+#### A masked pattern was here ####
+-2 305 65767 4294967529 76.54 4.72 true calvin xylophone 2013-03-01 09:11:58.703083 69 quiet hour
+-2 331 65707 4294967335 67.12 13.51 false bob ovid 2013-03-01 09:11:58.703285 62 joggying
+-2 373 65548 4294967423 16.98 43.6 true alice nixon 2013-03-01 09:11:58.703321 53 debate
+-2 378 65553 4294967461 9.81 10.36 true bob king 2013-03-01 09:11:58.703236 91 opthamology
+-2 389 65706 4294967488 26.68 17.93 false alice underhill 2013-03-01 09:11:58.703122 87 forestry
+-2 389 65738 4294967520 99.45 26.26 true bob falkner 2013-03-01 09:11:58.703071 17 nap time
+-2 393 65715 4294967305 48.3 1.85 true alice xylophone 2013-03-01 09:11:58.703105 30 values clariffication
+-2 406 65582 4294967311 20.94 35.74 false bob van buren 2013-03-01 09:11:58.703218 25 opthamology
+-2 406 65762 4294967443 1.79 33.42 false david falkner 2013-03-01 09:11:58.703254 58 opthamology
+-2 407 65612 4294967318 25.48 41.56 true david laertes 2013-03-01 09:11:58.703076 40 forestry
+-2 427 65666 4294967465 19.69 33.24 true bob xylophone 2013-03-01 09:11:58.703219 33 joggying
+-2 446 65790 4294967302 6.49 10.81 false alice underhill 2013-03-01 09:11:58.703127 44 undecided
+-2 450 65727 4294967487 94.57 30.4 false david miller 2013-03-01 09:11:58.703238 40 religion
+-2 473 65565 4294967320 87.78 12.26 true alice carson 2013-03-01 09:11:58.703074 90 xylophone band
+-3 260 65595 4294967545 59.07 6.75 false bob falkner 2013-03-01 09:11:58.70328 37 chemistry
+-3 264 65776 4294967398 20.95 5.97 false bob polk 2013-03-01 09:11:58.703128 93 joggying
+-3 266 65736 4294967397 19.94 10.01 false quinn ellison 2013-03-01 09:11:58.703232 89 forestry
+-3 268 65710 4294967448 82.74 12.48 true holly polk 2013-03-01 09:11:58.703273 15 undecided
+-3 270 65702 4294967512 38.05 1.07 true david carson 2013-03-01 09:11:58.703136 28 philosophy
+-3 275 65543 4294967522 74.92 17.29 false mike king 2013-03-01 09:11:58.703214 53 opthamology
+-3 275 65575 4294967441 38.22 2.43 true sarah xylophone 2013-03-01 09:11:58.703112 93 wind surfing
+-3 275 65622 4294967302 71.78 8.49 false wendy robinson 2013-03-01 09:11:58.703294 95 undecided
+-3 279 65661 4294967536 25.5 0.02 false wendy quirinius 2013-03-01 09:11:58.703266 75 undecided
+-3 280 65548 4294967350 52.3 33.06 true calvin white 2013-03-01 09:11:58.703295 30 quiet hour
+-3 280 65597 4294967377 18.44 49.8 true alice falkner 2013-03-01 09:11:58.703304 74 zync studies
+-3 280 65769 4294967324 28.78 35.05 true xavier ovid 2013-03-01 09:11:58.703148 43 kindergarten
+-3 284 65566 4294967400 62.81 39.1 false jessica white 2013-03-01 09:11:58.703199 70 opthamology
+-3 286 65573 4294967493 18.27 23.71 false zach young 2013-03-01 09:11:58.703191 22 kindergarten
+-3 289 65757 4294967528 56.2 44.24 true luke ichabod 2013-03-01 09:11:58.703294 7 yard duty
+-3 298 65720 4294967305 34.6 39.7 false ethan steinbeck 2013-03-01 09:11:58.703079 35 kindergarten
+-3 299 65763 4294967542 85.96 10.45 true jessica miller 2013-03-01 09:11:58.703245 26 mathematics
+-3 303 65617 4294967473 10.26 1.41 false ulysses quirinius 2013-03-01 09:11:58.703189 84 chemistry
+-3 307 65634 4294967546 90.3 28.44 false irene underhill 2013-03-01 09:11:58.703298 85 forestry
+-3 311 65569 4294967460 3.82 35.45 false luke garcia 2013-03-01 09:11:58.703076 93 chemistry
+-3 313 65540 4294967316 25.67 39.88 false ulysses robinson 2013-03-01 09:11:58.703227 61 religion
+-3 314 65670 4294967330 13.67 34.86 false wendy xylophone 2013-03-01 09:11:58.703191 85 mathematics
+-3 315 65671 4294967412 94.22 25.96 true oscar johnson 2013-03-01 09:11:58.703133 89 nap time
+-3 316 65696 4294967445 22.0 43.41 false priscilla laertes 2013-03-01 09:11:58.70325 51 values clariffication
+-3 318 65553 4294967452 9.86 32.77 false holly underhill 2013-03-01 09:11:58.703219 47 wind surfing
+-3 320 65644 4294967434 84.39 48.0 false sarah robinson 2013-03-01 09:11:58.703288 72 wind surfing
+-3 324 65773 4294967296 11.07 25.95 true oscar miller 2013-03-01 09:11:58.70332 57 opthamology
+-3 333 65562 4294967359 22.34 35.58 false ulysses steinbeck 2013-03-01 09:11:58.703259 87 xylophone band
+-3 335 65696 4294967333 72.26 9.66 true nick nixon 2013-03-01 09:11:58.703083 85 philosophy
+-3 337 65629 4294967521 55.59 6.54 true luke king 2013-03-01 09:11:58.703207 59 industrial engineering
+-3 337 65658 4294967361 43.4 12.05 false victor allen 2013-03-01 09:11:58.703155 45 topology
+-3 339 65671 4294967311 8.37 15.98 true bob ellison 2013-03-01 09:11:58.703261 14 linguistics
+-3 339 65737 4294967453 14.23 26.66 true ethan underhill 2013-03-01 09:11:58.703138 95 xylophone band
+-3 343 65783 4294967378 7.1 18.16 true ulysses carson 2013-03-01 09:11:58.703253 97 mathematics
+-3 344 65733 4294967363 0.56 11.96 true rachel thompson 2013-03-01 09:11:58.703276 88 wind surfing
+-3 344 65756 4294967378 52.13 18.95 true victor thompson 2013-03-01 09:11:58.703299 81 topology
+-3 346 65752 4294967298 56.05 34.03 false tom polk 2013-03-01 09:11:58.703217 49 zync studies
+-3 350 65566 4294967434 23.22 6.68 true nick robinson 2013-03-01 09:11:58.703147 24 education
+-3 362 65712 4294967325 43.73 48.74 false oscar garcia 2013-03-01 09:11:58.703282 30 chemistry
+-3 374 65731 4294967388 22.35 22.71 true bob johnson 2013-03-01 09:11:58.703204 80 biology
+-3 376 65548 4294967431 96.78 43.23 false fred ellison 2013-03-01 09:11:58.703233 75 education
+-3 376 65766 4294967326 97.88 5.58 true sarah zipper 2013-03-01 09:11:58.703289 49 study skills
+-3 381 65640 4294967379 59.34 7.97 false ulysses ellison 2013-03-01 09:11:58.703197 32 undecided
+-3 384 65613 4294967470 63.49 45.85 false holly steinbeck 2013-03-01 09:11:58.703242 54 chemistry
+-3 384 65676 4294967453 71.97 31.52 false alice davidson 2013-03-01 09:11:58.703226 14 xylophone band
+-3 386 65611 4294967331 58.81 22.43 true sarah miller 2013-03-01 09:11:58.70316 75 mathematics
+-3 386 65716 4294967496 12.12 2.37 false zach thompson 2013-03-01 09:11:58.703252 16 linguistics
+-3 387 65550 4294967355 84.75 22.75 true holly thompson 2013-03-01 09:11:58.703073 52 biology
+-3 400 65557 4294967503 76.31 29.44 false alice allen 2013-03-01 09:11:58.703323 19 debate
+-3 408 65667 4294967509 81.68 45.9 true david hernandez 2013-03-01 09:11:58.703252 52 topology
+-3 414 65608 4294967338 81.39 49.09 true tom steinbeck 2013-03-01 09:11:58.703251 11 xylophone band
+-3 415 65571 4294967536 61.81 24.24 true victor robinson 2013-03-01 09:11:58.703305 23 american history
+-3 423 65646 4294967378 63.19 34.04 false priscilla quirinius 2013-03-01 09:11:58.703228 35 xylophone band
+-3 430 65667 4294967469 65.5 40.46 true yuri xylophone 2013-03-01 09:11:58.703258 31 american history
+-3 431 65635 4294967500 29.06 0.34 false calvin ichabod 2013-03-01 09:11:58.703213 29 undecided
+-3 432 65646 4294967492 0.83 27.18 true oscar davidson 2013-03-01 09:11:58.703071 56 linguistics
+-3 433 65654 4294967455 6.83 5.33 false bob van buren 2013-03-01 09:11:58.703199 29 yard duty
+-3 438 65618 4294967398 62.39 4.62 false victor xylophone 2013-03-01 09:11:58.703135 88 values clariffication
+-3 447 65755 4294967320 43.69 20.03 false victor hernandez 2013-03-01 09:11:58.703176 14 forestry
+-3 448 65610 4294967314 81.97 31.11 true mike xylophone 2013-03-01 09:11:58.703308 79 opthamology
+-3 451 65696 4294967532 6.8 40.07 false luke young 2013-03-01 09:11:58.703182 27 biology
+-3 454 65627 4294967481 17.6 35.72 false bob underhill 2013-03-01 09:11:58.703188 67 religion
+-3 454 65705 4294967468 62.12 14.32 true mike white 2013-03-01 09:11:58.703087 40 joggying
+-3 454 65733 4294967544 73.83 18.42 false bob ichabod 2013-03-01 09:11:58.70324 96 debate
+-3 455 65570 4294967304 2.48 30.76 false alice king 2013-03-01 09:11:58.70314 42 forestry
+-3 458 65563 4294967315 62.77 41.5 false alice king 2013-03-01 09:11:58.703247 3 mathematics
+-3 458 65679 4294967331 64.29 43.8 true irene young 2013-03-01 09:11:58.703084 3 american history
+-3 458 65696 4294967418 45.24 8.49 false irene ellison 2013-03-01 09:11:58.703092 54 american history
+-3 459 65644 4294967456 92.71 0.08 false jessica king 2013-03-01 09:11:58.703279 53 joggying
+-3 465 65551 4294967457 83.39 46.64 true mike allen 2013-03-01 09:11:58.703292 53 values clariffication
+-3 465 65735 4294967298 72.3 22.58 false bob underhill 2013-03-01 09:11:58.703176 81 joggying
+-3 467 65575 4294967437 81.64 23.53 true tom hernandez 2013-03-01 09:11:58.703188 33 study skills
+-3 469 65577 4294967451 88.78 32.96 true katie ichabod 2013-03-01 09:11:58.703139 69 undecided
+-3 469 65698 4294967357 47.51 49.22 true david falkner 2013-03-01 09:11:58.703305 78 joggying
+-3 469 65752 4294967350 55.41 32.11 true oscar johnson 2013-03-01 09:11:58.70311 47 philosophy
+-3 477 65785 4294967464 97.51 10.84 true tom hernandez 2013-03-01 09:11:58.703108 7 history
+-3 485 65661 4294967441 26.21 16.19 false alice xylophone 2013-03-01 09:11:58.703129 97 topology
+-3 485 65669 4294967428 21.34 13.07 false priscilla zipper 2013-03-01 09:11:58.703321 28 quiet hour
+-3 485 65684 4294967483 11.83 8.04 false david garcia 2013-03-01 09:11:58.70319 63 wind surfing
+-3 493 65662 4294967482 28.75 30.21 false xavier garcia 2013-03-01 09:11:58.703194 5 education
+-3 494 65589 4294967369 48.09 14.4 false jessica johnson 2013-03-01 09:11:58.703319 79 nap time
+-3 498 65751 4294967331 80.65 0.28 true gabriella brown 2013-03-01 09:11:58.703288 61 opthamology
+-3 500 65704 4294967480 2.26 28.79 true mike polk 2013-03-01 09:11:58.70319 4 nap time
+-3 505 65565 4294967407 68.73 4.65 true holly nixon 2013-03-01 09:11:58.703262 15 debate
+-3 507 65671 4294967305 60.28 41.5 false quinn polk 2013-03-01 09:11:58.703244 77 industrial engineering
+-3 507 65728 4294967525 81.95 47.14 true rachel davidson 2013-03-01 09:11:58.703316 31 study skills
+PREHOOK: query: select t, f, s from text_llap2 order by t, s, f limit 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@text_llap2
+#### A masked pattern was here ####
+POSTHOOK: query: select t, f, s from text_llap2 order by t, s, f limit 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@text_llap2
+#### A masked pattern was here ####
+-2 1.79 david falkner
+-2 16.98 alice nixon
+-2 19.69 bob xylophone
+-2 20.94 bob van buren
+-2 25.48 david laertes
+-2 26.68 alice underhill
+-2 48.3 alice xylophone
+-2 6.49 alice underhill
+-2 67.12 bob ovid
+-2 76.54 calvin xylophone
+-2 87.78 alice carson
+-2 9.81 bob king
+-2 94.57 david miller
+-2 99.45 bob falkner
+-3 0.56 rachel thompson
+-3 0.83 oscar davidson
+-3 10.26 ulysses quirinius
+-3 11.07 oscar miller
+-3 11.83 david garcia
+-3 12.12 zach thompson
+-3 13.67 wendy xylophone
+-3 14.23 ethan underhill
+-3 17.6 bob underhill
+-3 18.27 zach young
+-3 18.44 alice falkner
+-3 19.94 quinn ellison
+-3 2.26 mike polk
+-3 2.48 alice king
+-3 20.95 bob polk
+-3 21.34 priscilla zipper
+-3 22.0 priscilla laertes
+-3 22.34 ulysses steinbeck
+-3 22.35 bob johnson
+-3 23.22 nick robinson
+-3 25.5 wendy quirinius
+-3 25.67 ulysses robinson
+-3 26.21 alice xylophone
+-3 28.75 xavier garcia
+-3 28.78 xavier ovid
+-3 29.06 calvin ichabod
+-3 3.82 luke garcia
+-3 34.6 ethan steinbeck
+-3 38.05 david carson
+-3 38.22 sarah xylophone
+-3 43.4 victor allen
+-3 43.69 victor hernandez
+-3 43.73 oscar garcia
+-3 45.24 irene ellison
+-3 47.51 david falkner
+-3 48.09 jessica johnson
+-3 52.13 victor thompson
+-3 52.3 calvin white
+-3 55.41 oscar johnson
+-3 55.59 luke king
+-3 56.05 tom polk
+-3 56.2 luke ichabod
+-3 58.81 sarah miller
+-3 59.07 bob falkner
+-3 59.34 ulysses ellison
+-3 6.8 luke young
+-3 6.83 bob van buren
+-3 60.28 quinn polk
+-3 61.81 victor robinson
+-3 62.12 mike white
+-3 62.39 victor xylophone
+-3 62.77 alice king
+-3 62.81 jessica white
+-3 63.19 priscilla quirinius
+-3 63.49 holly steinbeck
+-3 64.29 irene young
+-3 65.5 yuri xylophone
+-3 68.73 holly nixon
+-3 7.1 ulysses carson
+-3 71.78 wendy robinson
+-3 71.97 alice davidson
+-3 72.26 nick nixon
+-3 72.3 bob underhill
+-3 73.83 bob ichabod
+-3 74.92 mike king
+-3 76.31 alice allen
+-3 8.37 bob ellison
+-3 80.65 gabriella brown
+-3 81.39 tom steinbeck
+-3 81.64 tom hernandez
+-3 81.68 david hernandez
+-3 81.95 rachel davidson
+-3 81.97 mike xylophone
+-3 82.74 holly polk
+-3 83.39 mike allen
+-3 84.39 sarah robinson
+-3 84.75 holly thompson
+-3 85.96 jessica miller
+-3 88.78 katie ichabod
+-3 9.86 holly underhill
+-3 90.3 irene underhill
+-3 92.71 jessica king
+-3 94.22 oscar johnson
+-3 96.78 fred ellison
+-3 97.51 tom hernandez
+-3 97.88 sarah zipper
+PREHOOK: query: select ctinyint, cstring1, cboolean2 from text_llap100 order by ctinyint, cstring1, cboolean2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@text_llap100
+#### A masked pattern was here ####
+POSTHOOK: query: select ctinyint, cstring1, cboolean2 from text_llap100 order by ctinyint, cstring1, cboolean2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@text_llap100
+#### A masked pattern was here ####
+-1 cvLH6Eat2yFsyy7p NULL
+-11 cvLH6Eat2yFsyy7p NULL
+-11 cvLH6Eat2yFsyy7p NULL
+-11 cvLH6Eat2yFsyy7p NULL
+-12 cvLH6Eat2yFsyy7p NULL
+-13 cvLH6Eat2yFsyy7p NULL
+-16 cvLH6Eat2yFsyy7p NULL
+-16 cvLH6Eat2yFsyy7p NULL
+-19 cvLH6Eat2yFsyy7p NULL
+-21 cvLH6Eat2yFsyy7p NULL
+-21 cvLH6Eat2yFsyy7p NULL
+-22 cvLH6Eat2yFsyy7p NULL
+-22 cvLH6Eat2yFsyy7p NULL
+-22 cvLH6Eat2yFsyy7p NULL
+-23 cvLH6Eat2yFsyy7p NULL
+-23 cvLH6Eat2yFsyy7p NULL
+-23 cvLH6Eat2yFsyy7p NULL
+-24 cvLH6Eat2yFsyy7p NULL
+-28 cvLH6Eat2yFsyy7p NULL
+-28 cvLH6Eat2yFsyy7p NULL
+-30 cvLH6Eat2yFsyy7p NULL
+-32 cvLH6Eat2yFsyy7p NULL
+-33 cvLH6Eat2yFsyy7p NULL
+-33 cvLH6Eat2yFsyy7p NULL
+-34 cvLH6Eat2yFsyy7p NULL
+-34 cvLH6Eat2yFsyy7p NULL
+-36 cvLH6Eat2yFsyy7p NULL
+-37 cvLH6Eat2yFsyy7p NULL
+-4 cvLH6Eat2yFsyy7p NULL
+-4 cvLH6Eat2yFsyy7p NULL
+-40 cvLH6Eat2yFsyy7p NULL
+-43 cvLH6Eat2yFsyy7p NULL
+-44 cvLH6Eat2yFsyy7p NULL
+-45 cvLH6Eat2yFsyy7p NULL
+-45 cvLH6Eat2yFsyy7p NULL
+-47 cvLH6Eat2yFsyy7p NULL
+-48 cvLH6Eat2yFsyy7p NULL
+-48 cvLH6Eat2yFsyy7p NULL
+-5 cvLH6Eat2yFsyy7p NULL
+-5 cvLH6Eat2yFsyy7p NULL
+-5 cvLH6Eat2yFsyy7p NULL
+-50 cvLH6Eat2yFsyy7p NULL
+-51 cvLH6Eat2yFsyy7p NULL
+-53 cvLH6Eat2yFsyy7p NULL
+-54 cvLH6Eat2yFsyy7p NULL
+-55 cvLH6Eat2yFsyy7p NULL
+-55 cvLH6Eat2yFsyy7p NULL
+-56 cvLH6Eat2yFsyy7p NULL
+-56 cvLH6Eat2yFsyy7p NULL
+-57 cvLH6Eat2yFsyy7p NULL
+-59 cvLH6Eat2yFsyy7p NULL
+-62 cvLH6Eat2yFsyy7p NULL
+-7 cvLH6Eat2yFsyy7p NULL
+0 cvLH6Eat2yFsyy7p NULL
+0 cvLH6Eat2yFsyy7p NULL
+10 cvLH6Eat2yFsyy7p NULL
+13 cvLH6Eat2yFsyy7p NULL
+16 cvLH6Eat2yFsyy7p NULL
+18 cvLH6Eat2yFsyy7p NULL
+19 cvLH6Eat2yFsyy7p NULL
+2 cvLH6Eat2yFsyy7p NULL
+21 cvLH6Eat2yFsyy7p NULL
+24 cvLH6Eat2yFsyy7p NULL
+24 cvLH6Eat2yFsyy7p NULL
+26 cvLH6Eat2yFsyy7p NULL
+27 cvLH6Eat2yFsyy7p NULL
+27 cvLH6Eat2yFsyy7p NULL
+28 cvLH6Eat2yFsyy7p NULL
+29 cvLH6Eat2yFsyy7p NULL
+29 cvLH6Eat2yFsyy7p NULL
+30 cvLH6Eat2yFsyy7p NULL
+31 cvLH6Eat2yFsyy7p NULL
+31 cvLH6Eat2yFsyy7p NULL
+34 cvLH6Eat2yFsyy7p NULL
+34 cvLH6Eat2yFsyy7p NULL
+36 cvLH6Eat2yFsyy7p NULL
+36 cvLH6Eat2yFsyy7p NULL
+38 cvLH6Eat2yFsyy7p NULL
+38 cvLH6Eat2yFsyy7p NULL
+38 cvLH6Eat2yFsyy7p NULL
+39 cvLH6Eat2yFsyy7p NULL
+4 cvLH6Eat2yFsyy7p NULL
+40 cvLH6Eat2yFsyy7p NULL
+40 cvLH6Eat2yFsyy7p NULL
+41 cvLH6Eat2yFsyy7p NULL
+43 cvLH6Eat2yFsyy7p NULL
+46 cvLH6Eat2yFsyy7p NULL
+5 cvLH6Eat2yFsyy7p NULL
+51 cvLH6Eat2yFsyy7p NULL
+51 cvLH6Eat2yFsyy7p NULL
+53 cvLH6Eat2yFsyy7p NULL
+53 cvLH6Eat2yFsyy7p NULL
+61 cvLH6Eat2yFsyy7p NULL
+61 cvLH6Eat2yFsyy7p NULL
+61 cvLH6Eat2yFsyy7p NULL
+62 cvLH6Eat2yFsyy7p NULL
+8 cvLH6Eat2yFsyy7p NULL
+9 cvLH6Eat2yFsyy7p NULL
+NULL cvLH6Eat2yFsyy7p NULL
+NULL cvLH6Eat2yFsyy7p NULL
+PREHOOK: query: select * from text_llap100 order by cint, cstring1, cstring2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@text_llap100
+#### A masked pattern was here ####
+POSTHOOK: query: select * from text_llap100 order by cint, cstring1, cstring2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@text_llap100
+#### A masked pattern was here ####
+-1 -75 528534767 NULL -1.389 -863.257 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:49.331 1969-12-31 16:00:07.585 true NULL
+-11 -15431 528534767 NULL -11.0 -15431.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.176 1969-12-31 16:00:07.787 true NULL
+-11 7476 528534767 NULL -11.0 7476.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.551 1969-12-31 15:59:57.567 true NULL
+-11 9472 528534767 NULL -11.0 9472.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:47.917 1969-12-31 16:00:03.716 true NULL
+-12 -2013 528534767 NULL -12.0 -2013.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:14.907 1969-12-31 15:59:58.789 true NULL
+-13 -13372 528534767 NULL -13.0 -13372.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:08.499 1969-12-31 15:59:48.221 true NULL
+-16 -6922 528534767 NULL -16.0 -6922.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:08.402 1969-12-31 15:59:50.561 true NULL
+-16 -7964 528534767 NULL -16.0 -7964.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:08.035 1969-12-31 16:00:12.464 true NULL
+-19 1206 528534767 NULL -19.0 1206.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:04.587 1969-12-31 16:00:08.381 true NULL
+-21 -7183 528534767 NULL -21.0 -7183.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:45.035 1969-12-31 16:00:06.182 true NULL
+-21 3168 528534767 NULL -21.0 3168.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:56.834 1969-12-31 16:00:13.331 true NULL
+-22 3856 528534767 NULL -22.0 3856.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:48.508 1969-12-31 15:59:54.534 true NULL
+-22 77 528534767 NULL -22.0 77.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:45.928 1969-12-31 15:59:43.621 true NULL
+-22 8499 528534767 NULL -22.0 8499.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:15.626 1969-12-31 16:00:10.923 true NULL
+-23 -10154 528534767 NULL -23.0 -10154.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:44.088 1969-12-31 15:59:56.086 true NULL
+-23 13026 528534767 NULL -23.0 13026.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:14.625 1969-12-31 16:00:10.77 true NULL
+-23 4587 528534767 NULL -23.0 4587.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:49.732 1969-12-31 15:59:48.52 true NULL
+-24 163 528534767 NULL -24.0 163.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:55.51 1969-12-31 16:00:04.014 true NULL
+-28 -15813 528534767 NULL -28.0 -15813.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:55.787 1969-12-31 16:00:01.546 true NULL
+-28 6453 528534767 NULL -28.0 6453.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:01.475 1969-12-31 16:00:07.828 true NULL
+-30 834 528534767 NULL -30.0 834.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:14.072 1969-12-31 16:00:03.004 true NULL
+-32 11242 528534767 NULL -32.0 11242.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:49.091 1969-12-31 15:59:55.681 true NULL
+-33 14072 528534767 NULL -33.0 14072.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:03.168 1969-12-31 15:59:55.836 true NULL
+-33 7350 528534767 NULL -33.0 7350.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:07.952 1969-12-31 15:59:48.183 true NULL
+-34 15007 528534767 NULL -34.0 15007.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:50.434 1969-12-31 16:00:13.352 true NULL
+-34 4181 528534767 NULL -34.0 4181.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:06.557 1969-12-31 16:00:04.869 true NULL
+-36 1639 528534767 NULL -36.0 1639.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:00.186 1969-12-31 16:00:13.098 true NULL
+-37 -12472 528534767 NULL -37.0 -12472.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:13.3 1969-12-31 15:59:55.998 true NULL
+-4 -1027 528534767 NULL -4.0 -1027.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:46.628 1969-12-31 16:00:11.413 true NULL
+-4 2617 528534767 NULL -4.0 2617.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:44.21 1969-12-31 15:59:44.733 true NULL
+-40 -4463 528534767 NULL -40.0 -4463.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.647 1969-12-31 15:59:46.254 true NULL
+-43 486 528534767 NULL -43.0 486.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:01.345 1969-12-31 15:59:52.667 true NULL
+-44 -1299 528534767 NULL -44.0 -1299.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:00.163 1969-12-31 15:59:47.687 true NULL
+-45 -14072 528534767 NULL -45.0 -14072.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:45.621 1969-12-31 15:59:45.914 true NULL
+-45 5521 528534767 NULL -45.0 5521.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:00.01 1969-12-31 15:59:48.553 true NULL
+-47 -2468 528534767 NULL -47.0 -2468.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:48.68 1969-12-31 16:00:02.94 true NULL
+-48 -7735 528534767 NULL -48.0 -7735.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:09.472 1969-12-31 16:00:00.8 true NULL
+-48 13300 528534767 NULL -48.0 13300.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:00.077 1969-12-31 15:59:45.827 true NULL
+-5 -13229 528534767 NULL -5.0 -13229.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:00.834 1969-12-31 16:00:00.388 true NULL
+-5 -14379 528534767 NULL -5.0 -14379.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:00.037 1969-12-31 15:59:49.141 true NULL
+-5 12422 528534767 NULL -5.0 12422.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:55.745 1969-12-31 15:59:48.802 true NULL
+-50 -13326 528534767 NULL -50.0 -13326.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:46.674 1969-12-31 16:00:08.875 true NULL
+-51 -12083 528534767 NULL -51.0 -12083.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:13.026 1969-12-31 16:00:02.52 true NULL
+-53 -3419 528534767 NULL -53.0 -3419.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:46.771 1969-12-31 15:59:53.744 true NULL
+-54 -10268 528534767 NULL -54.0 -10268.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:53.417 1969-12-31 16:00:00.687 true NULL
+-55 -7353 528534767 NULL -55.0 -7353.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:44.941 1969-12-31 15:59:54.268 true NULL
+-55 -7449 528534767 NULL -55.0 -7449.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:49.846 1969-12-31 15:59:55.75 true NULL
+-56 8353 528534767 NULL -56.0 8353.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:11.242 1969-12-31 15:59:46.526 true NULL
+-56 8402 528534767 NULL -56.0 8402.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.01 1969-12-31 16:00:05.146 true NULL
+-57 -11492 528534767 NULL -57.0 -11492.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:45.261 1969-12-31 16:00:05.306 true NULL
+-59 10688 528534767 NULL -59.0 10688.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.746 1969-12-31 16:00:15.489 true NULL
+-62 10 528534767 NULL -62.0 10.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.265 1969-12-31 15:59:56.584 true NULL
+-7 2541 528534767 NULL -7.0 2541.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:08.353 1969-12-31 15:59:57.374 true NULL
+0 -3166 528534767 NULL 0.0 -3166.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:10.688 1969-12-31 16:00:01.385 true NULL
+0 15626 528534767 NULL 0.0 15626.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:09.566 1969-12-31 16:00:15.217 true NULL
+10 9366 528534767 NULL 10.0 9366.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:01.358 1969-12-31 15:59:50.592 true NULL
+13 1358 528534767 NULL 13.0 1358.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:06.453 1969-12-31 16:00:00.423 true NULL
+16 5780 528534767 NULL 16.0 5780.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:44.451 1969-12-31 16:00:12.752 true NULL
+18 -3045 528534767 NULL 18.0 -3045.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:47.829 1969-12-31 16:00:05.045 true NULL
+19 7952 528534767 NULL 19.0 7952.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:12.161 1969-12-31 16:00:00.95 true NULL
+2 1345 528534767 NULL 2.0 1345.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:55.333 1969-12-31 16:00:00.517 true NULL
+21 11737 528534767 NULL 21.0 11737.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:55.537 1969-12-31 15:59:45.022 true NULL
+24 -4812 528534767 NULL 24.0 -4812.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:07.86 1969-12-31 15:59:55 true NULL
+24 4432 528534767 NULL 24.0 4432.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:02.541 1969-12-31 16:00:10.895 true NULL
+26 3961 528534767 NULL 26.0 3961.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:57.987 1969-12-31 15:59:52.232 true NULL
+27 -14965 528534767 NULL 27.0 -14965.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:12.422 1969-12-31 16:00:09.517 true NULL
+27 -7824 528534767 NULL 27.0 -7824.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:04.963 1969-12-31 15:59:56.474 true NULL
+28 8035 528534767 NULL 28.0 8035.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:03.856 1969-12-31 15:59:55.95 true NULL
+29 -1990 528534767 NULL 29.0 -1990.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:06.958 1969-12-31 15:59:52.902 true NULL
+29 7021 528534767 NULL 29.0 7021.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:15.007 1969-12-31 16:00:15.148 true NULL
+30 -814 528534767 NULL 30.0 -814.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:56.955 1969-12-31 16:00:11.799 true NULL
+31 -9566 528534767 NULL 31.0 -9566.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:44.187 1969-12-31 16:00:06.961 true NULL
+31 4963 528534767 NULL 31.0 4963.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:07.021 1969-12-31 16:00:02.997 true NULL
+34 -15059 528534767 NULL 34.0 -15059.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:01.639 1969-12-31 16:00:13.206 true NULL
+34 -4255 528534767 NULL 34.0 -4255.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:56.581 1969-12-31 15:59:57.88 true NULL
+36 -15912 528534767 NULL 36.0 -15912.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:04.432 1969-12-31 16:00:04.376 true NULL
+36 14907 528534767 NULL 36.0 14907.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:47.528 1969-12-31 15:59:47.206 true NULL
+38 -11320 528534767 NULL 38.0 -11320.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:09.169 1969-12-31 16:00:03.822 true NULL
+38 -4667 528534767 NULL 38.0 -4667.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:09.366 1969-12-31 15:59:52.334 true NULL
+38 -6583 528534767 NULL 38.0 -6583.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:53.078 1969-12-31 16:00:06.722 true NULL
+39 -10909 528534767 NULL 39.0 -10909.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.276 1969-12-31 16:00:12.738 true NULL
+4 -14739 528534767 NULL 4.0 -14739.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:55.188 1969-12-31 16:00:15.26 true NULL
+40 -1724 528534767 NULL 40.0 -1724.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:05.521 1969-12-31 15:59:57.835 true NULL
+40 -7984 528534767 NULL 40.0 -7984.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:01.206 1969-12-31 16:00:02.59 true NULL
+41 37 528534767 NULL 41.0 37.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.817 1969-12-31 15:59:53.672 true NULL
+43 1475 528534767 NULL 43.0 1475.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:56.988 1969-12-31 16:00:03.442 true NULL
+46 6958 528534767 NULL 46.0 6958.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.036 1969-12-31 16:00:10.191 true NULL
+5 14625 528534767 NULL 5.0 14625.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:05.78 1969-12-31 16:00:15.34 true NULL
+51 -15790 528534767 NULL 51.0 -15790.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:49.871 1969-12-31 15:59:57.821 true NULL
+51 -4490 528534767 NULL 51.0 -4490.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:07.476 1969-12-31 15:59:49.318 true NULL
+53 -10129 528534767 NULL 53.0 -10129.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:04.181 1969-12-31 16:00:08.061 true NULL
+53 -12171 528534767 NULL 53.0 -12171.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:07.35 1969-12-31 15:59:57.549 true NULL
+61 -1254 528534767 NULL 61.0 -1254.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:11.737 1969-12-31 16:00:12.004 true NULL
+61 -15549 528534767 NULL 61.0 -15549.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:44.569 1969-12-31 15:59:51.665 true NULL
+61 12161 528534767 NULL 61.0 12161.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:02.617 1969-12-31 16:00:10.536 true NULL
+62 6557 528534767 NULL 62.0 6557.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.016 1969-12-31 16:00:00.367 true NULL
+8 7860 528534767 NULL 8.0 7860.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.701 1969-12-31 16:00:01.97 true NULL
+9 9169 528534767 NULL 9.0 9169.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:03.961 1969-12-31 16:00:14.126 true NULL
+NULL -3012 528534767 NULL NULL -3012.0 cvLH6Eat2yFsyy7p NULL NULL 1969-12-31 16:00:03.756 true NULL
+NULL -4213 528534767 NULL NULL -4213.0 cvLH6Eat2yFsyy7p NULL NULL 1969-12-31 16:00:13.589 true NULL
+PREHOOK: query: select csmallint, cstring1, cboolean2 from text_llap100 order by csmallint, cstring1, cboolean2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@text_llap100
+#### A masked pattern was here ####
+POSTHOOK: query: select csmallint, cstring1, cboolean2 from text_llap100 order by csmallint, cstring1, cboolean2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@text_llap100
+#### A masked pattern was here ####
+-10129 cvLH6Eat2yFsyy7p NULL
+-10154 cvLH6Eat2yFsyy7p NULL
+-10268 cvLH6Eat2yFsyy7p NULL
+-1027 cvLH6Eat2yFsyy7p NULL
+-10909 cvLH6Eat2yFsyy7p NULL
+-11320 cvLH6Eat2yFsyy7p NULL
+-11492 cvLH6Eat2yFsyy7p NULL
+-12083 cvLH6Eat2yFsyy7p NULL
+-12171 cvLH6Eat2yFsyy7p NULL
+-12472 cvLH6Eat2yFsyy7p NULL
+-1254 cvLH6Eat2yFsyy7p NULL
+-1299 cvLH6Eat2yFsyy7p NULL
+-13229 cvLH6Eat2yFsyy7p NULL
+-13326 cvLH6Eat2yFsyy7p NULL
+-13372 cvLH6Eat2yFsyy7p NULL
+-14072 cvLH6Eat2yFsyy7p NULL
+-14379 cvLH6Eat2yFsyy7p NULL
+-14739 cvLH6Eat2yFsyy7p NULL
+-14965 cvLH6Eat2yFsyy7p NULL
+-15059 cvLH6Eat2yFsyy7p NULL
+-15431 cvLH6Eat2yFsyy7p NULL
+-15549 cvLH6Eat2yFsyy7p NULL
+-15790 cvLH6Eat2yFsyy7p NULL
+-15813 cvLH6Eat2yFsyy7p NULL
+-15912 cvLH6Eat2yFsyy7p NULL
+-1724 cvLH6Eat2yFsyy7p NULL
+-1990 cvLH6Eat2yFsyy7p NULL
+-2013 cvLH6Eat2yFsyy7p NULL
+-2468 cvLH6Eat2yFsyy7p NULL
+-3012 cvLH6Eat2yFsyy7p NULL
+-3045 cvLH6Eat2yFsyy7p NULL
+-3166 cvLH6Eat2yFsyy7p NULL
+-3419 cvLH6Eat2yFsyy7p NULL
+-4213 cvLH6Eat2yFsyy7p NULL
+-4255 cvLH6Eat2yFsyy7p NULL
+-4463 cvLH6Eat2yFsyy7p NULL
+-4490 cvLH6Eat2yFsyy7p NULL
+-4667 cvLH6Eat2yFsyy7p NULL
+-4812 cvLH6Eat2yFsyy7p NULL
+-6583 cvLH6Eat2yFsyy7p NULL
+-6922 cvLH6Eat2yFsyy7p NULL
+-7183 cvLH6Eat2yFsyy7p NULL
+-7353 cvLH6Eat2yFsyy7p NULL
+-7449 cvLH6Eat2yFsyy7p NULL
+-75 cvLH6Eat2yFsyy7p NULL
+-7735 cvLH6Eat2yFsyy7p NULL
+-7824 cvLH6Eat2yFsyy7p NULL
+-7964 cvLH6Eat2yFsyy7p NULL
+-7984 cvLH6Eat2yFsyy7p NULL
+-814 cvLH6Eat2yFsyy7p NULL
+-9566 cvLH6Eat2yFsyy7p NULL
+10 cvLH6Eat2yFsyy7p NULL
+10688 cvLH6Eat2yFsyy7p NULL
+11242 cvLH6Eat2yFsyy7p NULL
+11737 cvLH6Eat2yFsyy7p NULL
+1206 cvLH6Eat2yFsyy7p NULL
+12161 cvLH6Eat2yFsyy7p NULL
+12422 cvLH6Eat2yFsyy7p NULL
+13026 cvLH6Eat2yFsyy7p NULL
+13300 cvLH6Eat2yFsyy7p NULL
+1345 cvLH6Eat2yFsyy7p NULL
+1358 cvLH6Eat2yFsyy7p NULL
+14072 cvLH6Eat2yFsyy7p NULL
+14625 cvLH6Eat2yFsyy7p NULL
+1475 cvLH6Eat2yFsyy7p NULL
+14907 cvLH6Eat2yFsyy7p NULL
+15007 cvLH6Eat2yFsyy7p NULL
+15626 cvLH6Eat2yFsyy7p NULL
+163 cvLH6Eat2yFsyy7p NULL
+1639 cvLH6Eat2yFsyy7p NULL
+2541 cvLH6Eat2yFsyy7p NULL
+2617 cvLH6Eat2yFsyy7p NULL
+3168 cvLH6Eat2yFsyy7p NULL
+37 cvLH6Eat2yFsyy7p NULL
+3856 cvLH6Eat2yFsyy7p NULL
+3961 cvLH6Eat2yFsyy7p NULL
+4181 cvLH6Eat2yFsyy7p NULL
+4432 cvLH6Eat2yFsyy7p NULL
+4587 cvLH6Eat2yFsyy7p NULL
+486 cvLH6Eat2yFsyy7p NULL
+4963 cvLH6Eat2yFsyy7p NULL
+5521 cvLH6Eat2yFsyy7p NULL
+5780 cvLH6Eat2yFsyy7p NULL
+6453 cvLH6Eat2yFsyy7p NULL
+6557 cvLH6Eat2yFsyy7p NULL
+6958 cvLH6Eat2yFsyy7p NULL
+7021 cvLH6Eat2yFsyy7p NULL
+7350 cvLH6Eat2yFsyy7p NULL
+7476 cvLH6Eat2yFsyy7p NULL
+77 cvLH6Eat2yFsyy7p NULL
+7860 cvLH6Eat2yFsyy7p NULL
+7952 cvLH6Eat2yFsyy7p NULL
+8035 cvLH6Eat2yFsyy7p NULL
+834 cvLH6Eat2yFsyy7p NULL
+8353 cvLH6Eat2yFsyy7p NULL
+8402 cvLH6Eat2yFsyy7p NULL
+8499 cvLH6Eat2yFsyy7p NULL
+9169 cvLH6Eat2yFsyy7p NULL
+9366 cvLH6Eat2yFsyy7p NULL
+9472 cvLH6Eat2yFsyy7p NULL
+PREHOOK: query: select t, s, ts from text_llap2 order by t, s, ts limit 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@text_llap2
+#### A masked pattern was here ####
+POSTHOOK: query: select t, s, ts from text_llap2 order by t, s, ts limit 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@text_llap2
+#### A masked pattern was here ####
+-2 alice carson 2013-03-01 09:11:58.703074
+-2 alice nixon 2013-03-01 09:11:58.703321
+-2 alice underhill 2013-03-01 09:11:58.703122
+-2 alice underhill 2013-03-01 09:11:58.703127
+-2 alice xylophone 2013-03-01 09:11:58.703105
+-2 bob falkner 2013-03-01 09:11:58.703071
+-2 bob king 2013-03-01 09:11:58.703236
+-2 bob ovid 2013-03-01 09:11:58.703285
+-2 bob van buren 2013-03-01 09:11:58.703218
+-2 bob xylophone 2013-03-01 09:11:58.703219
+-2 calvin xylophone 2013-03-01 09:11:58.703083
+-2 david falkner 2013-03-01 09:11:58.703254
+-2 david laertes 2013-03-01 09:11:58.703076
+-2 david miller 2013-03-01 09:11:58.703238
+-3 alice allen 2013-03-01 09:11:58.703323
+-3 alice davidson 2013-03-01 09:11:58.703226
+-3 alice falkner 2013-03-01 09:11:58.703304
+-3 alice king 2013-03-01 09:11:58.70314
+-3 alice king 2013-03-01 09:11:58.703247
+-3 alice xylophone 2013-03-01 09:11:58.703129
+-3 bob ellison 2013-03-01 09:11:58.703261
+-3 bob falkner 2013-03-01 09:11:58.70328
+-3 bob ichabod 2013-03-01 09:11:58.70324
+-3 bob johnson 2013-03-01 09:11:58.703204
+-3 bob polk 2013-03-01 09:11:58.703128
+-3 bob underhill 2013-03-01 09:11:58.703176
+-3 bob underhill 2013-03-01 09:11:58.703188
+-3 bob van buren 2013-03-01 09:11:58.703199
+-3 calvin ichabod 2013-03-01 09:11:58.703213
+-3 calvin white 2013-03-01 09:11:58.703295
+-3 david carson 2013-03-01 09:11:58.703136
+-3 david falkner 2013-03-01 09:11:58.703305
+-3 david garcia 2013-03-01 09:11:58.70319
+-3 david hernandez 2013-03-01 09:11:58.703252
+-3 ethan steinbeck 2013-03-01 09:11:58.703079
+-3 ethan underhill 2013-03-01 09:11:58.703138
+-3 fred ellison 2013-03-01 09:11:58.703233
+-3 gabriella brown 2013-03-01 09:11:58.703288
+-3 holly nixon 2013-03-01 09:11:58.703262
+-3 holly polk 2013-03-01 09:11:58.703273
+-3 holly steinbeck 2013-03-01 09:11:58.703242
+-3 holly thompson 2013-03-01 09:11:58.703073
+-3 holly underhill 2013-03-01 09:11:58.703219
+-3 irene ellison 2013-03-01 09:11:58.703092
+-3 irene underhill 2013-03-01 09:11:58.703298
+-3 irene young 2013-03-01 09:11:58.703084
+-3 jessica johnson 2013-03-01 09:11:58.703319
+-3 jessica king 2013-03-01 09:11:58.703279
+-3 jessica miller 2013-03-01 09:11:58.703245
+-3 jessica white 2013-03-01 09:11:58.703199
+-3 katie ichabod 2013-03-01 09:11:58.703139
+-3 luke garcia 2013-03-01 09:11:58.703076
+-3 luke ichabod 2013-03-01 09:11:58.703294
+-3 luke king 2013-03-01 09:11:58.703207
+-3 luke young 2013-03-01 09:11:58.703182
+-3 mike allen 2013-03-01 09:11:58.703292
+-3 mike king 2013-03-01 09:11:58.703214
+-3 mike polk 2013-03-01 09:11:58.70319
+-3 mike white 2013-03-01 09:11:58.703087
+-3 mike xylophone 2013-03-01 09:11:58.703308
+-3 nick nixon 2013-03-01 09:11:58.703083
+-3 nick robinson 2013-03-01 09:11:58.703147
+-3 oscar davidson 2013-03-01 09:11:58.703071
+-3 oscar garcia 2013-03-01 09:11:58.703282
+-3 oscar johnson 2013-03-01 09:11:58.70311
+-3 oscar johnson 2013-03-01 09:11:58.703133
+-3 oscar miller 2013-03-01 09:11:58.70332
+-3 priscilla laertes 2013-03-01 09:11:58.70325
+-3 priscilla quirinius 2013-03-01 09:11:58.703228
+-3 priscilla zipper 2013-03-01 09:11:58.703321
+-3 quinn ellison 2013-03-01 09:11:58.703232
+-3 quinn polk 2013-03-01 09:11:58.703244
+-3 rachel davidson 2013-03-01 09:11:58.703316
+-3 rachel thompson 2013-03-01 09:11:58.703276
+-3 sarah miller 2013-03-01 09:11:58.70316
+-3 sarah robinson 2013-03-01 09:11:58.703288
+-3 sarah xylophone 2013-03-01 09:11:58.703112
+-3 sarah zipper 2013-03-01 09:11:58.703289
+-3 tom hernandez 2013-03-01 09:11:58.703108
+-3 tom hernandez 2013-03-01 09:11:58.703188
+-3 tom polk 2013-03-01 09:11:58.703217
+-3 tom steinbeck 2013-03-01 09:11:58.703251
+-3 ulysses carson 2013-03-01 09:11:58.703253
+-3 ulysses ellison 2013-03-01 09:11:58.703197
+-3 ulysses quirinius 2013-03-01 09:11:58.703189
+-3 ulysses robinson 2013-03-01 09:11:58.703227
+-3 ulysses steinbeck 2013-03-01 09:11:58.703259
+-3 victor allen 2013-03-01 09:11:58.703155
+-3 victor hernandez 2013-03-01 09:11:58.703176
+-3 victor robinson 2013-03-01 09:11:58.703305
+-3 victor thompson 2013-03-01 09:11:58.703299
+-3 victor xylophone 2013-03-01 09:11:58.703135
+-3 wendy quirinius 2013-03-01 09:11:58.703266
+-3 wendy robinson 2013-03-01 09:11:58.703294
+-3 wendy xylophone 2013-03-01 09:11:58.703191
+-3 xavier garcia 2013-03-01 09:11:58.703194
+-3 xavier ovid 2013-03-01 09:11:58.703148
+-3 yuri xylophone 2013-03-01 09:11:58.703258
+-3 zach thompson 2013-03-01 09:11:58.703252
+-3 zach young 2013-03-01 09:11:58.703191
+PREHOOK: query: select csmallint, cstring1, cboolean2 from text_llap100 order by csmallint, cstring1, cboolean2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@text_llap100
+#### A masked pattern was here ####
+POSTHOOK: query: select csmallint, cstring1, cboolean2 from text_llap100 order by csmallint, cstring1, cboolean2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@text_llap100
+#### A masked pattern was here ####
+-10129 cvLH6Eat2yFsyy7p NULL
+-10154 cvLH6Eat2yFsyy7p NULL
+-10268 cvLH6Eat2yFsyy7p NULL
+-1027 cvLH6Eat2yFsyy7p NULL
+-10909 cvLH6Eat2yFsyy7p NULL
+-11320 cvLH6Eat2yFsyy7p NULL
+-11492 cvLH6Eat2yFsyy7p NULL
+-12083 cvLH6Eat2yFsyy7p NULL
+-12171 cvLH6Eat2yFsyy7p NULL
+-12472 cvLH6Eat2yFsyy7p NULL
+-1254 cvLH6Eat2yFsyy7p NULL
+-1299 cvLH6Eat2yFsyy7p NULL
+-13229 cvLH6Eat2yFsyy7p NULL
+-13326 cvLH6Eat2yFsyy7p NULL
+-13372 cvLH6Eat2yFsyy7p NULL
+-14072 cvLH6Eat2yFsyy7p NULL
+-14379 cvLH6Eat2yFsyy7p NULL
+-14739 cvLH6Eat2yFsyy7p NULL
+-14965 cvLH6Eat2yFsyy7p NULL
+-15059 cvLH6Eat2yFsyy7p NULL
+-15431 cvLH6Eat2yFsyy7p NULL
+-15549 cvLH6Eat2yFsyy7p NULL
+-15790 cvLH6Eat2yFsyy7p NULL
+-15813 cvLH6Eat2yFsyy7p NULL
+-15912 cvLH6Eat2yFsyy7p NULL
+-1724 cvLH6Eat2yFsyy7p NULL
+-1990 cvLH6Eat2yFsyy7p NULL
+-2013 cvLH6Eat2yFsyy7p NULL
+-2468 cvLH6Eat2yFsyy7p NULL
+-3012 cvLH6Eat2yFsyy7p NULL
+-3045 cvLH6Eat2yFsyy7p NULL
+-3166 cvLH6Eat2yFsyy7p NULL
+-3419 cvLH6Eat2yFsyy7p NULL
+-4213 cvLH6Eat2yFsyy7p NULL
+-4255 cvLH6Eat2yFsyy7p NULL
+-4463 cvLH6Eat2yFsyy7p NULL
+-4490 cvLH6Eat2yFsyy7p NULL
+-4667 cvLH6Eat2yFsyy7p NULL
+-4812 cvLH6Eat2yFsyy7p NULL
+-6583 cvLH6Eat2yFsyy7p NULL
+-6922 cvLH6Eat2yFsyy7p NULL
+-7183 cvLH6Eat2yFsyy7p NULL
+-7353 cvLH6Eat2yFsyy7p NULL
+-7449 cvLH6Eat2yFsyy7p NULL
+-75 cvLH6Eat2yFsyy7p NULL
+-7735 cvLH6Eat2yFsyy7p NULL
+-7824 cvLH6Eat2yFsyy7p NULL
+-7964 cvLH6Eat2yFsyy7p NULL
+-7984 cvLH6Eat2yFsyy7p NULL
+-814 cvLH6Eat2yFsyy7p NULL
+-9566 cvLH6Eat2yFsyy7p NULL
+10 cvLH6Eat2yFsyy7p NULL
+10688 cvLH6Eat2yFsyy7p NULL
+11242 cvLH6Eat2yFsyy7p NULL
+11737 cvLH6Eat2yFsyy7p NULL
+1206 cvLH6Eat2yFsyy7p NULL
+12161 cvLH6Eat2yFsyy7p NULL
+12422 cvLH6Eat2yFsyy7p NULL
+13026 cvLH6Eat2yFsyy7p NULL
+13300 cvLH6Eat2yFsyy7p NULL
+1345 cvLH6Eat2yFsyy7p NULL
+1358 cvLH6Eat2yFsyy7p NULL
+14072 cvLH6Eat2yFsyy7p NULL
+14625 cvLH6Eat2yFsyy7p NULL
+1475 cvLH6Eat2yFsyy7p NULL
+14907 cvLH6Eat2yFsyy7p NULL
+15007 cvLH6Eat2yFsyy7p NULL
+15626 cvLH6Eat2yFsyy7p NULL
+163 cvLH6Eat2yFsyy7p NULL
+1639 cvLH6Eat2yFsyy7p NULL
+2541 cvLH6Eat2yFsyy7p NULL
+2617 cvLH6Eat2yFsyy7p NULL
+3168 cvLH6Eat2yFsyy7p NULL
+37 cvLH6Eat2yFsyy7p NULL
+3856 cvLH6Eat2yFsyy7p NULL
+3961 cvLH6Eat2yFsyy7p NULL
+4181 cvLH6Eat2yFsyy7p NULL
+4432 cvLH6Eat2yFsyy7p NULL
+4587 cvLH6Eat2yFsyy7p NULL
+486 cvLH6Eat2yFsyy7p NULL
+4963 cvLH6Eat2yFsyy7p NULL
+5521 cvLH6Eat2yFsyy7p NULL
+5780 cvLH6Eat2yFsyy7p NULL
+6453 cvLH6Eat2yFsyy7p NULL
+6557 cvLH6Eat2yFsyy7p NULL
+6958 cvLH6Eat2yFsyy7p NULL
+7021 cvLH6Eat2yFsyy7p NULL
+7350 cvLH6Eat2yFsyy7p NULL
+7476 cvLH6Eat2yFsyy7p NULL
+77 cvLH6Eat2yFsyy7p NULL
+7860 cvLH6Eat2yFsyy7p NULL
+7952 cvLH6Eat2yFsyy7p NULL
+8035 cvLH6Eat2yFsyy7p NULL
+834 cvLH6Eat2yFsyy7p NULL
+8353 cvLH6Eat2yFsyy7p NULL
+8402 cvLH6Eat2yFsyy7p NULL
+8499 cvLH6Eat2yFsyy7p NULL
+9169 cvLH6Eat2yFsyy7p NULL
+9366 cvLH6Eat2yFsyy7p NULL
+9472 cvLH6Eat2yFsyy7p NULL
+PREHOOK: query: DROP TABLE text_llap
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@text_llap
+PREHOOK: Output: default@text_llap
+POSTHOOK: query: DROP TABLE text_llap
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@text_llap
+POSTHOOK: Output: default@text_llap
diff --git a/ql/src/test/results/clientpositive/llap/llap_uncompressed.q.out b/ql/src/test/results/clientpositive/llap/llap_uncompressed.q.out
new file mode 100644
index 0000000..6900cdb
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/llap_uncompressed.q.out
@@ -0,0 +1,283 @@
+PREHOOK: query: DROP TABLE orc_llap_n0
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE orc_llap_n0
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE orc_llap_n0(
+ ctinyint TINYINT,
+ csmallint SMALLINT,
+ cint INT,
+ cbigint BIGINT,
+ cfloat FLOAT,
+ cdouble DOUBLE,
+ cstring1 STRING,
+ cstring2 STRING,
+ ctimestamp1 TIMESTAMP,
+ ctimestamp2 TIMESTAMP,
+ cboolean1 BOOLEAN,
+ cboolean2 BOOLEAN,
+ cdecimal1 decimal(10,2),
+ cdecimal2 decimal(38,5))
+ STORED AS ORC tblproperties ("orc.compress"="NONE")
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@orc_llap_n0
+POSTHOOK: query: CREATE TABLE orc_llap_n0(
+ ctinyint TINYINT,
+ csmallint SMALLINT,
+ cint INT,
+ cbigint BIGINT,
+ cfloat FLOAT,
+ cdouble DOUBLE,
+ cstring1 STRING,
+ cstring2 STRING,
+ ctimestamp1 TIMESTAMP,
+ ctimestamp2 TIMESTAMP,
+ cboolean1 BOOLEAN,
+ cboolean2 BOOLEAN,
+ cdecimal1 decimal(10,2),
+ cdecimal2 decimal(38,5))
+ STORED AS ORC tblproperties ("orc.compress"="NONE")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@orc_llap_n0
+PREHOOK: query: insert into table orc_llap_n0
+select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2,
+ cast("3.345" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+PREHOOK: Output: default@orc_llap_n0
+POSTHOOK: query: insert into table orc_llap_n0
+select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2,
+ cast("3.345" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+POSTHOOK: Output: default@orc_llap_n0
+POSTHOOK: Lineage: orc_llap_n0.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: orc_llap_n0.cboolean1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: orc_llap_n0.cboolean2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ]
+POSTHOOK: Lineage: orc_llap_n0.cdecimal1 SIMPLE []
+POSTHOOK: Lineage: orc_llap_n0.cdecimal2 SIMPLE []
+POSTHOOK: Lineage: orc_llap_n0.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: orc_llap_n0.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: orc_llap_n0.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: orc_llap_n0.csmallint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: orc_llap_n0.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: orc_llap_n0.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ]
+POSTHOOK: Lineage: orc_llap_n0.ctimestamp1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: orc_llap_n0.ctimestamp2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: orc_llap_n0.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ]
+PREHOOK: query: alter table orc_llap_n0 set tblproperties ("orc.compress"="NONE", 'orc.write.format'='UNSTABLE-PRE-2.0')
+PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: Input: default@orc_llap_n0
+PREHOOK: Output: default@orc_llap_n0
+POSTHOOK: query: alter table orc_llap_n0 set tblproperties ("orc.compress"="NONE", 'orc.write.format'='UNSTABLE-PRE-2.0')
+POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: Input: default@orc_llap_n0
+POSTHOOK: Output: default@orc_llap_n0
+PREHOOK: query: insert into table orc_llap_n0
+select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2,
+ cast("3.345" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+PREHOOK: Output: default@orc_llap_n0
+POSTHOOK: query: insert into table orc_llap_n0
+select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2,
+ cast("3.345" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+POSTHOOK: Output: default@orc_llap_n0
+POSTHOOK: Lineage: orc_llap_n0.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: orc_llap_n0.cboolean1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: orc_llap_n0.cboolean2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ]
+POSTHOOK: Lineage: orc_llap_n0.cdecimal1 SIMPLE []
+POSTHOOK: Lineage: orc_llap_n0.cdecimal2 SIMPLE []
+POSTHOOK: Lineage: orc_llap_n0.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: orc_llap_n0.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: orc_llap_n0.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: orc_llap_n0.csmallint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: orc_llap_n0.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: orc_llap_n0.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ]
+POSTHOOK: Lineage: orc_llap_n0.ctimestamp1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: orc_llap_n0.ctimestamp2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: orc_llap_n0.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ]
+PREHOOK: query: drop table llap_temp_table
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table llap_temp_table
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: explain
+select * from orc_llap_n0 where cint > 10 and cbigint is not null
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select * from orc_llap_n0 where cint > 10 and cbigint is not null
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: orc_llap_n0
+          filterExpr: ((cint > 10) and cbigint is not null) (type: boolean)
+          Filter Operator
+            predicate: ((cint > 10) and cbigint is not null) (type: boolean)
+            Select Operator
+              expressions: ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cfloat (type: float), cdouble (type: double), cstring1 (type: string), cstring2 (type: string), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), cboolean1 (type: boolean), cboolean2 (type: boolean), cdecimal1 (type: decimal(10,2)), cdecimal2 (type: decimal(38,5))
+              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13
+              ListSink
+
+PREHOOK: query: create table llap_temp_table as
+select * from orc_llap_n0 where cint > 10 and cbigint is not null
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@orc_llap_n0
+PREHOOK: Output: database:default
+PREHOOK: Output: default@llap_temp_table
+POSTHOOK: query: create table llap_temp_table as
+select * from orc_llap_n0 where cint > 10 and cbigint is not null
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@orc_llap_n0
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@llap_temp_table
+POSTHOOK: Lineage: llap_temp_table.cbigint SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: llap_temp_table.cboolean1 SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: llap_temp_table.cboolean2 SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cboolean2, type:boolean, comment:null), ]
+POSTHOOK: Lineage: llap_temp_table.cdecimal1 SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cdecimal1, type:decimal(10,2), comment:null), ]
+POSTHOOK: Lineage: llap_temp_table.cdecimal2 SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cdecimal2, type:decimal(38,5), comment:null), ]
+POSTHOOK: Lineage: llap_temp_table.cdouble SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: llap_temp_table.cfloat SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: llap_temp_table.cint SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: llap_temp_table.csmallint SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: llap_temp_table.cstring1 SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: llap_temp_table.cstring2 SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cstring2, type:string, comment:null), ]
+POSTHOOK: Lineage: llap_temp_table.ctimestamp1 SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: llap_temp_table.ctimestamp2 SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: llap_temp_table.ctinyint SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:ctinyint, type:tinyint, comment:null), ]
+PREHOOK: query: select sum(hash(*)) from llap_temp_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@llap_temp_table
+#### A masked pattern was here ####
+POSTHOOK: query: select sum(hash(*)) from llap_temp_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@llap_temp_table
+#### A masked pattern was here ####
+212787774304
+PREHOOK: query: explain
+select * from orc_llap_n0 where cint > 10 and cint < 5000000
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select * from orc_llap_n0 where cint > 10 and cint < 5000000
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: orc_llap_n0
+          filterExpr: ((cint > 10) and (cint < 5000000)) (type: boolean)
+          Filter Operator
+            predicate: ((cint < 5000000) and (cint > 10)) (type: boolean)
+            Select Operator
+              expressions: ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cfloat (type: float), cdouble (type: double), cstring1 (type: string), cstring2 (type: string), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), cboolean1 (type: boolean), cboolean2 (type: boolean), cdecimal1 (type: decimal(10,2)), cdecimal2 (type: decimal(38,5))
+              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13
+              ListSink
+
+PREHOOK: query: select * from orc_llap_n0 where cint > 10 and cint < 5000000
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_llap_n0
+#### A masked pattern was here ####
+POSTHOOK: query: select * from orc_llap_n0 where cint > 10 and cint < 5000000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_llap_n0
+#### A masked pattern was here ####
+-51 NULL 6981 707684071 -51.0 NULL YdG61y00526u5 G71l66F25 1969-12-31 16:00:08.451 NULL false true 3.35 5.56789
+-51 NULL 762 1587111633 -51.0 NULL q5y2Vy1 UbUx5 1969-12-31 16:00:08.451 NULL true false 3.35 5.56789
+-51 NULL 6981 -1569596201 -51.0 NULL o4lvY20511w0EOX3P3I82p63 J6YIW3yQlW3GydlRm 1969-12-31 16:00:08.451 NULL false true 3.35 5.56789
+-51 NULL 2949963 -1580871111 -51.0 NULL 0K68k3bdl7jO7 TPPAu 1969-12-31 16:00:08.451 NULL true false 3.35 5.56789
+-51 NULL 2089466 -240556350 -51.0 NULL cXX24dH7tblSj46j2g C31eea0wrHHqvj 1969-12-31 16:00:08.451 NULL true true 3.35 5.56789
+-51 NULL 6981 -471484665 -51.0 NULL 4KhrrQ0nJ7bMNTvhSCA R31tq72k1528DQ5C3Y4cNub 1969-12-31 16:00:08.451 NULL true false 3.35 5.56789
+-51 NULL 762 -755927849 -51.0 NULL a10E76jX35YwquKCTA s7473frMk58vm 1969-12-31 16:00:08.451 NULL true true 3.35 5.56789
+NULL 1016 3432650 1864027286 NULL 1016.0 0SPVSOVDI73t 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:12.364 false true 3.35 5.56789
+NULL 10144 4756105 1864027286 NULL 10144.0 bvoO6VwRmH6181mdOm87Do 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:12.134 true true 3.35 5.56789
+NULL 10653 3887593 1864027286 NULL 10653.0 2wak50xB5nHswbX 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:48.858 false true 3.35 5.56789
+NULL 10782 1286921 1864027286 NULL 10782.0 ODLrXI8882q8LS8 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:52.138 true true 3.35 5.56789
+NULL 197 762 1864027286 NULL 2563.58 3WsVeqb28VWEEOLI8ail 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:45.603 true true 3.35 5.56789
+NULL 1535 86028 1864027286 NULL 1535.0 T2o8XRFAL0HC4ikDQnfoCymw 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:54.662 true true 3.35 5.56789
+NULL 5064 504142 1864027286 NULL 5064.0 PlOxor04p5cvVl 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:09.828 true true 3.35 5.56789
+NULL -3799 1248059 1864027286 NULL -3799.0 Uhps6mMh3IfHB3j7yH62K 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:54.622 false true 3.35 5.56789
+NULL 10299 799471 1864027286 NULL 10299.0 2fu24 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:52.516 false true 3.35 5.56789
+NULL -8915 2101183 1864027286 NULL -8915.0 x7By66525 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:05.831 false true 3.35 5.56789
+8 NULL 2433892 -1611863517 8.0 NULL 674ILv3V2TxFqXP6wSbL VLprkK2XfX 1969-12-31 16:00:15.892 NULL false true 3.35 5.56789
+8 NULL 3073556 332961835 8.0 NULL rR855m18hps5nkaFqE43W pH15gLf8B4yNFDWFH74 1969-12-31 16:00:15.892 NULL true true 3.35 5.56789
+8 NULL 6981 627355276 8.0 NULL K630vaVf 7gDn3I45FGIX0J6JH74PCEN 1969-12-31 16:00:15.892 NULL false true 3.35 5.56789
+8 NULL 2229621 -381406148 8.0 NULL q7onkS7QRPh5ghOK oKb0bi 1969-12-31 16:00:15.892 NULL true false 3.35 5.56789
+NULL 359 6981 -1887561756 NULL 9763215.5639 sF2CRfgt2K 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:12.489 false false 3.35 5.56789
+NULL -12328 3253295 -1887561756 NULL -12328.0 Ut5NYg5XWb 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:57.985 true false 3.35 5.56789
+11 NULL 1000828 1531084669 11.0 NULL wM316f6NqGIkoP388j3F6 poWQQo3Upvt3Wh 1969-12-31 16:00:02.351 NULL false true 3.35 5.56789
+11 NULL 6981 -1908387379 11.0 NULL a3EhVU6Wuy7ycJ7wY7h2gv 0542kSCNs54o7tD6e2YuI3 1969-12-31 16:00:02.351 NULL true false 3.35 5.56789
+11 NULL 1310786 -413875656 11.0 NULL W0rvA4H1xn0xMG4uk0 8yVVjG 1969-12-31 16:00:02.351 NULL false true 3.35 5.56789
+11 NULL 6981 -667592125 11.0 NULL NULL xIVF2uu7 1969-12-31 16:00:02.351 NULL NULL true 3.35 5.56789
+11 NULL 3583612 -1172590956 11.0 NULL hrSdTD2Q05 mJ5nwN6o4s8Hi4 1969-12-31 16:00:02.351 NULL true true 3.35 5.56789
+11 NULL 6981 1532810435 11.0 NULL Y5x3JuI3M8jngv5N L760FuvYP 1969-12-31 16:00:02.351 NULL true true 3.35 5.56789
+11 NULL 4972984 -483828108 11.0 NULL Sf45K8ueb68jp6s8 jPWX6Wr4fmTBSc5HSlX1r 1969-12-31 16:00:02.351 NULL true false 3.35 5.56789
+11 NULL 762 -1005594359 11.0 NULL BLoMwUJ51ns6pd FtT7S 1969-12-31 16:00:02.351 NULL false false 3.35 5.56789
+NULL 359 762 -1645852809 NULL 9763215.5639 40ks5556SV xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:55.352 false false 3.35 5.56789
+NULL -75 6981 -1645852809 NULL -863.257 o5mb0QP5Y48Qd4vdB0 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:44.062 true false 3.35 5.56789
+NULL -75 6981 -1645852809 NULL -863.257 1FNNhmiFLGw425NA13g xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.463 false false 3.35 5.56789
+NULL -13036 1288927 -1645852809 NULL -13036.0 yinBY725P7V2 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:00.763 true false 3.35 5.56789
+-51 NULL 6981 707684071 -51.0 NULL YdG61y00526u5 G71l66F25 1969-12-31 16:00:08.451 NULL false true 3.35 5.56789
+-51 NULL 762 1587111633 -51.0 NULL q5y2Vy1 UbUx5 1969-12-31 16:00:08.451 NULL true false 3.35 5.56789
+-51 NULL 6981 -1569596201 -51.0 NULL o4lvY20511w0EOX3P3I82p63 J6YIW3yQlW3GydlRm 1969-12-31 16:00:08.451 NULL false true 3.35 5.56789
+-51 NULL 2949963 -1580871111 -51.0 NULL 0K68k3bdl7jO7 TPPAu 1969-12-31 16:00:08.451 NULL true false 3.35 5.56789
+-51 NULL 2089466 -240556350 -51.0 NULL cXX24dH7tblSj46j2g C31eea0wrHHqvj 1969-12-31 16:00:08.451 NULL true true 3.35 5.56789
+-51 NULL 6981 -471484665 -51.0 NULL 4KhrrQ0nJ7bMNTvhSCA R31tq72k1528DQ5C3Y4cNub 1969-12-31 16:00:08.451 NULL true false 3.35 5.56789
+-51 NULL 762 -755927849 -51.0 NULL a10E76jX35YwquKCTA s7473frMk58vm 1969-12-31 16:00:08.451 NULL true true 3.35 5.56789
+NULL 1016 3432650 1864027286 NULL 1016.0 0SPVSOVDI73t 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:12.364 false true 3.35 5.56789
+NULL 10144 4756105 1864027286 NULL 10144.0 bvoO6VwRmH6181mdOm87Do 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:12.134 true true 3.35 5.56789
+NULL 10653 3887593 1864027286 NULL 10653.0 2wak50xB5nHswbX 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:48.858 false true 3.35 5.56789
+NULL 10782 1286921 1864027286 NULL 10782.0 ODLrXI8882q8LS8 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:52.138 true true 3.35 5.56789
+NULL 197 762 1864027286 NULL 2563.58 3WsVeqb28VWEEOLI8ail 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:45.603 true true 3.35 5.56789
+NULL 1535 86028 1864027286 NULL 1535.0 T2o8XRFAL0HC4ikDQnfoCymw 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:54.662 true true 3.35 5.56789
+NULL 5064 504142 1864027286 NULL 5064.0 PlOxor04p5cvVl 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:09.828 true true 3.35 5.56789
+NULL -3799 1248059 1864027286 NULL -3799.0 Uhps6mMh3IfHB3j7yH62K 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:54.622 false true 3.35 5.56789
+NULL 10299 799471 1864027286 NULL 10299.0 2fu24 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:52.516 false true 3.35 5.56789
+NULL -8915 2101183 1864027286 NULL -8915.0 x7By66525 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:05.831 false true 3.35 5.56789
+8 NULL 2433892 -1611863517 8.0 NULL 674ILv3V2TxFqXP6wSbL VLprkK2XfX 1969-12-31 16:00:15.892 NULL false true 3.35 5.56789
+8 NULL 3073556 332961835 8.0 NULL rR855m18hps5nkaFqE43W pH15gLf8B4yNFDWFH74 1969-12-31 16:00:15.892 NULL true true 3.35 5.56789
+8 NULL 6981 627355276 8.0 NULL K630vaVf 7gDn3I45FGIX0J6JH74PCEN 1969-12-31 16:00:15.892 NULL false true 3.35 5.56789
+8 NULL 2229621 -381406148 8.0 NULL q7onkS7QRPh5ghOK oKb0bi 1969-12-31 16:00:15.892 NULL true false 3.35 5.56789
+NULL 359 6981 -1887561756 NULL 9763215.5639 sF2CRfgt2K 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:12.489 false false 3.35 5.56789
+NULL -12328 3253295 -1887561756 NULL -12328.0 Ut5NYg5XWb 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:57.985 true false 3.35 5.56789
+11 NULL 1000828 1531084669 11.0 NULL wM316f6NqGIkoP388j3F6 poWQQo3Upvt3Wh 1969-12-31 16:00:02.351 NULL false true 3.35 5.56789
+11 NULL 6981 -1908387379 11.0 NULL a3EhVU6Wuy7ycJ7wY7h2gv 0542kSCNs54o7tD6e2YuI3 1969-12-31 16:00:02.351 NULL true false 3.35 5.56789
+11 NULL 1310786 -413875656 11.0 NULL W0rvA4H1xn0xMG4uk0 8yVVjG 1969-12-31 16:00:02.351 NULL false true 3.35 5.56789
+11 NULL 6981 -667592125 11.0 NULL NULL xIVF2uu7 1969-12-31 16:00:02.351 NULL NULL true 3.35 5.56789
+11 NULL 3583612 -1172590956 11.0 NULL hrSdTD2Q05 mJ5nwN6o4s8Hi4 1969-12-31 16:00:02.351 NULL true true 3.35 5.56789
+11 NULL 6981 1532810435 11.0 NULL Y5x3JuI3M8jngv5N L760FuvYP 1969-12-31 16:00:02.351 NULL true true 3.35 5.56789
+11 NULL 4972984 -483828108 11.0 NULL Sf45K8ueb68jp6s8 jPWX6Wr4fmTBSc5HSlX1r 1969-12-31 16:00:02.351 NULL true false 3.35 5.56789
+11 NULL 762 -1005594359 11.0 NULL BLoMwUJ51ns6pd FtT7S 1969-12-31 16:00:02.351 NULL false false 3.35 5.56789
+NULL 359 762 -1645852809 NULL 9763215.5639 40ks5556SV xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:55.352 false false 3.35 5.56789
+NULL -75 6981 -1645852809 NULL -863.257 o5mb0QP5Y48Qd4vdB0 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:44.062 true false 3.35 5.56789
+NULL -75 6981 -1645852809 NULL -863.257 1FNNhmiFLGw425NA13g xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.463 false false 3.35 5.56789
+NULL -13036 1288927 -1645852809 NULL -13036.0 yinBY725P7V2 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:00.763 true false 3.35 5.56789
+PREHOOK: query: DROP TABLE orc_llap_n0
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@orc_llap_n0
+PREHOOK: Output: default@orc_llap_n0
+POSTHOOK: query: DROP TABLE orc_llap_n0
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@orc_llap_n0
+POSTHOOK: Output: default@orc_llap_n0
+PREHOOK: query: drop table llap_temp_table
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@llap_temp_table
+PREHOOK: Output:
default@llap_temp_table +POSTHOOK: query: drop table llap_temp_table +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@llap_temp_table +POSTHOOK: Output: default@llap_temp_table diff --git a/ql/src/test/results/clientpositive/llap/llap_vector_nohybridgrace.q.out b/ql/src/test/results/clientpositive/llap/llap_vector_nohybridgrace.q.out index b361b1e..b3b2dcc 100644 --- a/ql/src/test/results/clientpositive/llap/llap_vector_nohybridgrace.q.out +++ b/ql/src/test/results/clientpositive/llap/llap_vector_nohybridgrace.q.out @@ -91,8 +91,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -133,8 +133,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -270,8 +270,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -312,8 +312,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_create.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_create.q.out index 5837963..9a70096 100644 --- a/ql/src/test/results/clientpositive/llap/materialized_view_create.q.out +++ b/ql/src/test/results/clientpositive/llap/materialized_view_create.q.out @@ -50,7 +50,7 @@ Table Parameters: numFiles 1 numRows 5 rawDataSize 1025 - totalSize 503 + totalSize 501 #### A masked pattern was here #### # Storage Information @@ -111,7 +111,7 @@ Table Parameters: numFiles 1 numRows 5 rawDataSize 580 - totalSize 348 + totalSize 345 #### A masked pattern was here #### # Storage Information @@ -247,7 +247,7 @@ key value numFiles 1 numRows 5 rawDataSize 1605 -totalSize 702 +totalSize 703 #### A masked pattern was here #### PREHOOK: query: drop materialized view cmv_mat_view_n4 PREHOOK: type: DROP_MATERIALIZED_VIEW diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_5.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_5.q.out index 10039cc..3d5acca 100644 --- a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_5.q.out +++ b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_5.q.out @@ -400,7 +400,7 @@ Table Type: MATERIALIZED_VIEW Table Parameters: bucketing_version 2 numFiles 2 - totalSize 1078 + totalSize 1076 transactional true transactional_properties default #### A masked pattern 
was here #### diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_describe.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_describe.q.out index 7cf7132..85092a0 100644 --- a/ql/src/test/results/clientpositive/llap/materialized_view_describe.q.out +++ b/ql/src/test/results/clientpositive/llap/materialized_view_describe.q.out @@ -73,7 +73,7 @@ Table Parameters: numFiles 1 numRows 5 rawDataSize 580 - totalSize 348 + totalSize 345 #### A masked pattern was here #### # Storage Information @@ -100,7 +100,7 @@ key foo numFiles 1 numRows 5 rawDataSize 580 -totalSize 348 +totalSize 345 #### A masked pattern was here #### PREHOOK: query: select a, c from cmv_mat_view_n8 PREHOOK: type: QUERY @@ -242,7 +242,7 @@ Table Parameters: numFiles 1 numRows 5 rawDataSize 1025 - totalSize 503 + totalSize 501 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/llap/mergejoin.q.out b/ql/src/test/results/clientpositive/llap/mergejoin.q.out index 832ed48..b240b11 100644 --- a/ql/src/test/results/clientpositive/llap/mergejoin.q.out +++ b/ql/src/test/results/clientpositive/llap/mergejoin.q.out @@ -64,8 +64,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -148,8 +147,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -445,8 +443,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -498,8 +496,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1620,8 +1618,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1665,8 +1663,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1816,8 
+1814,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1861,8 +1859,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2012,8 +2010,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2057,8 +2055,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2220,8 +2218,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2304,8 +2302,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2356,8 +2353,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2618,8 +2615,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2671,8 +2668,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2834,8 +2831,8 @@ STAGE PLANS: Map 
Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2887,8 +2884,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2940,8 +2937,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2993,8 +2990,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3151,8 +3148,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3204,8 +3201,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3365,8 +3362,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3449,8 +3446,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3501,8 +3497,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3771,8 +3767,8 @@ STAGE PLANS: Map Vectorization: enabled: true 
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3824,8 +3820,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3877,8 +3873,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3930,8 +3926,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -4101,8 +4097,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -4153,8 +4149,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/orc_create.q.out b/ql/src/test/results/clientpositive/llap/orc_create.q.out index 5aa43a8..423e51c 100644 --- a/ql/src/test/results/clientpositive/llap/orc_create.q.out +++ b/ql/src/test/results/clientpositive/llap/orc_create.q.out @@ -449,7 +449,7 @@ PREHOOK: query: CREATE TABLE orc_create_people_staging ( first_name string, last_name string, address string, - salary decimal, + salary decimal(38,0), start_date timestamp, state string) PREHOOK: type: CREATETABLE @@ -460,7 +460,7 @@ POSTHOOK: query: CREATE TABLE orc_create_people_staging ( first_name string, last_name string, address string, - salary decimal, + salary decimal(38,0), start_date timestamp, state string) POSTHOOK: type: CREATETABLE @@ -481,7 +481,7 @@ PREHOOK: query: CREATE TABLE orc_create_people ( first_name string, last_name string, address string, - salary decimal, + salary decimal(38,0), start_date timestamp) PARTITIONED BY (state string) STORED AS orc @@ -493,7 +493,7 @@ POSTHOOK: query: CREATE TABLE orc_create_people ( first_name string, last_name string, address string, - salary decimal, + salary decimal(38,0), start_date timestamp) PARTITIONED BY (state string) STORED AS orc @@ -515,13 +515,13 @@ POSTHOOK: Lineage: orc_create_people 
PARTITION(state=Ca).address SIMPLE [(orc_cr POSTHOOK: Lineage: orc_create_people PARTITION(state=Ca).first_name SIMPLE [(orc_create_people_staging)orc_create_people_staging.FieldSchema(name:first_name, type:string, comment:null), ] POSTHOOK: Lineage: orc_create_people PARTITION(state=Ca).id SIMPLE [(orc_create_people_staging)orc_create_people_staging.FieldSchema(name:id, type:int, comment:null), ] POSTHOOK: Lineage: orc_create_people PARTITION(state=Ca).last_name SIMPLE [(orc_create_people_staging)orc_create_people_staging.FieldSchema(name:last_name, type:string, comment:null), ] -POSTHOOK: Lineage: orc_create_people PARTITION(state=Ca).salary SIMPLE [(orc_create_people_staging)orc_create_people_staging.FieldSchema(name:salary, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_create_people PARTITION(state=Ca).salary SIMPLE [(orc_create_people_staging)orc_create_people_staging.FieldSchema(name:salary, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_create_people PARTITION(state=Ca).start_date SIMPLE [(orc_create_people_staging)orc_create_people_staging.FieldSchema(name:start_date, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_create_people PARTITION(state=Or).address SIMPLE [(orc_create_people_staging)orc_create_people_staging.FieldSchema(name:address, type:string, comment:null), ] POSTHOOK: Lineage: orc_create_people PARTITION(state=Or).first_name SIMPLE [(orc_create_people_staging)orc_create_people_staging.FieldSchema(name:first_name, type:string, comment:null), ] POSTHOOK: Lineage: orc_create_people PARTITION(state=Or).id SIMPLE [(orc_create_people_staging)orc_create_people_staging.FieldSchema(name:id, type:int, comment:null), ] POSTHOOK: Lineage: orc_create_people PARTITION(state=Or).last_name SIMPLE [(orc_create_people_staging)orc_create_people_staging.FieldSchema(name:last_name, type:string, comment:null), ] -POSTHOOK: Lineage: orc_create_people PARTITION(state=Or).salary SIMPLE [(orc_create_people_staging)orc_create_people_staging.FieldSchema(name:salary, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_create_people PARTITION(state=Or).salary SIMPLE [(orc_create_people_staging)orc_create_people_staging.FieldSchema(name:salary, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_create_people PARTITION(state=Or).start_date SIMPLE [(orc_create_people_staging)orc_create_people_staging.FieldSchema(name:start_date, type:timestamp, comment:null), ] PREHOOK: query: SELECT COUNT(*) FROM orc_create_people where id < 10 and state = 'Ca' PREHOOK: type: QUERY diff --git a/ql/src/test/results/clientpositive/llap/orc_llap_counters.q.out b/ql/src/test/results/clientpositive/llap/orc_llap_counters.q.out index c4fe46e..5305c3d 100644 --- a/ql/src/test/results/clientpositive/llap/orc_llap_counters.q.out +++ b/ql/src/test/results/clientpositive/llap/orc_llap_counters.q.out @@ -233,7 +233,7 @@ Table Parameters: orc.bloom.filter.columns * orc.row.index.stride 1000 rawDataSize 1139514 - totalSize 55453 + totalSize 55243 #### A masked pattern was here #### # Storage Information @@ -251,7 +251,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n1 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 16676 + HDFS_BYTES_READ: 16671 HDFS_BYTES_WRITTEN: 104 HDFS_READ_OPS: 7 HDFS_LARGE_READ_OPS: 0 diff --git a/ql/src/test/results/clientpositive/llap/orc_llap_counters1.q.out b/ql/src/test/results/clientpositive/llap/orc_llap_counters1.q.out index 6dec42f..28f383e 100644 --- 
a/ql/src/test/results/clientpositive/llap/orc_llap_counters1.q.out +++ b/ql/src/test/results/clientpositive/llap/orc_llap_counters1.q.out @@ -233,7 +233,7 @@ Table Parameters: orc.bloom.filter.columns * orc.row.index.stride 1000 rawDataSize 1139514 - totalSize 55453 + totalSize 55243 #### A masked pattern was here #### # Storage Information @@ -251,7 +251,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 17731 + HDFS_BYTES_READ: 17726 HDFS_BYTES_WRITTEN: 104 HDFS_READ_OPS: 8 HDFS_LARGE_READ_OPS: 0 diff --git a/ql/src/test/results/clientpositive/llap/orc_merge11.q.out b/ql/src/test/results/clientpositive/llap/orc_merge11.q.out index 1b2ddd3..8e7840c 100644 --- a/ql/src/test/results/clientpositive/llap/orc_merge11.q.out +++ b/ql/src/test/results/clientpositive/llap/orc_merge11.q.out @@ -6,11 +6,11 @@ PREHOOK: query: DROP TABLE orc_split_elim_n0 PREHOOK: type: DROPTABLE POSTHOOK: query: DROP TABLE orc_split_elim_n0 POSTHOOK: type: DROPTABLE -PREHOOK: query: create table orc_split_elim_n0 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +PREHOOK: query: create table orc_split_elim_n0 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@orc_split_elim_n0 -POSTHOOK: query: create table orc_split_elim_n0 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +POSTHOOK: query: create table orc_split_elim_n0 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orc_split_elim_n0 @@ -30,36 +30,36 @@ POSTHOOK: query: load data local inpath '../../data/files/orc_split_elim.orc' in POSTHOOK: type: LOAD #### A masked pattern was here #### POSTHOOK: Output: default@orc_split_elim_n0 -PREHOOK: query: create table orcfile_merge1_n2 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc tblproperties("orc.compress.size"="4096") +PREHOOK: query: create table orcfile_merge1_n2 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc tblproperties("orc.compress.size"="4096") PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@orcfile_merge1_n2 -POSTHOOK: query: create table orcfile_merge1_n2 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc tblproperties("orc.compress.size"="4096") +POSTHOOK: query: create table orcfile_merge1_n2 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc tblproperties("orc.compress.size"="4096") POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orcfile_merge1_n2 -PREHOOK: query: insert overwrite table orcfile_merge1_n2 select * from orc_split_elim_n0 +PREHOOK: query: insert overwrite table orcfile_merge1_n2 select * from orc_split_elim_n0 order by userid PREHOOK: type: QUERY PREHOOK: Input: default@orc_split_elim_n0 PREHOOK: Output: default@orcfile_merge1_n2 -POSTHOOK: query: insert overwrite table orcfile_merge1_n2 select * from orc_split_elim_n0 +POSTHOOK: query: insert overwrite table orcfile_merge1_n2 select * from orc_split_elim_n0 order by userid POSTHOOK: type: QUERY POSTHOOK: Input: 
default@orc_split_elim_n0 POSTHOOK: Output: default@orcfile_merge1_n2 -POSTHOOK: Lineage: orcfile_merge1_n2.decimal1 SIMPLE [(orc_split_elim_n0)orc_split_elim_n0.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orcfile_merge1_n2.decimal1 SIMPLE [(orc_split_elim_n0)orc_split_elim_n0.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orcfile_merge1_n2.string1 SIMPLE [(orc_split_elim_n0)orc_split_elim_n0.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orcfile_merge1_n2.subtype SIMPLE [(orc_split_elim_n0)orc_split_elim_n0.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orcfile_merge1_n2.ts SIMPLE [(orc_split_elim_n0)orc_split_elim_n0.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orcfile_merge1_n2.userid SIMPLE [(orc_split_elim_n0)orc_split_elim_n0.FieldSchema(name:userid, type:bigint, comment:null), ] -PREHOOK: query: insert into table orcfile_merge1_n2 select * from orc_split_elim_n0 +PREHOOK: query: insert into table orcfile_merge1_n2 select * from orc_split_elim_n0 order by userid PREHOOK: type: QUERY PREHOOK: Input: default@orc_split_elim_n0 PREHOOK: Output: default@orcfile_merge1_n2 -POSTHOOK: query: insert into table orcfile_merge1_n2 select * from orc_split_elim_n0 +POSTHOOK: query: insert into table orcfile_merge1_n2 select * from orc_split_elim_n0 order by userid POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_split_elim_n0 POSTHOOK: Output: default@orcfile_merge1_n2 -POSTHOOK: Lineage: orcfile_merge1_n2.decimal1 SIMPLE [(orc_split_elim_n0)orc_split_elim_n0.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orcfile_merge1_n2.decimal1 SIMPLE [(orc_split_elim_n0)orc_split_elim_n0.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orcfile_merge1_n2.string1 SIMPLE [(orc_split_elim_n0)orc_split_elim_n0.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orcfile_merge1_n2.subtype SIMPLE [(orc_split_elim_n0)orc_split_elim_n0.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orcfile_merge1_n2.ts SIMPLE [(orc_split_elim_n0)orc_split_elim_n0.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -76,42 +76,42 @@ File Version: 0.12 with ORC_135 Rows: 50000 Compression: ZLIB Compression size: 4096 -Type: struct +Type: struct Stripe Statistics: Stripe 1: Column 0: count: 50000 hasNull: false - Column 1: count: 50000 hasNull: false bytesOnDisk: 45 min: 2 max: 100 sum: 4999238 - Column 2: count: 50000 hasNull: false bytesOnDisk: 72 min: bar max: zebra sum: 249980 - Column 3: count: 50000 hasNull: false bytesOnDisk: 5167 min: 0.8 max: 80.0 sum: 400102.80000000005 - Column 4: count: 50000 hasNull: false bytesOnDisk: 542 min: 0 max: 6 sum: 32 - Column 5: count: 50000 hasNull: false bytesOnDisk: 71 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 + Column 1: count: 50000 hasNull: false bytesOnDisk: 30 min: 2 max: 100 sum: 4999238 + Column 2: count: 50000 hasNull: false bytesOnDisk: 55 min: bar max: zebra sum: 249980 + Column 3: count: 50000 hasNull: false bytesOnDisk: 5114 min: 0.8 max: 80.0 sum: 400102.8 + Column 4: count: 50000 hasNull: false bytesOnDisk: 498 min: 0 max: 6 sum: 32 + Column 5: count: 50000 hasNull: false bytesOnDisk: 64 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 File Statistics: Column 
0: count: 50000 hasNull: false - Column 1: count: 50000 hasNull: false bytesOnDisk: 45 min: 2 max: 100 sum: 4999238 - Column 2: count: 50000 hasNull: false bytesOnDisk: 72 min: bar max: zebra sum: 249980 - Column 3: count: 50000 hasNull: false bytesOnDisk: 5167 min: 0.8 max: 80.0 sum: 400102.80000000005 - Column 4: count: 50000 hasNull: false bytesOnDisk: 542 min: 0 max: 6 sum: 32 - Column 5: count: 50000 hasNull: false bytesOnDisk: 71 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 + Column 1: count: 50000 hasNull: false bytesOnDisk: 30 min: 2 max: 100 sum: 4999238 + Column 2: count: 50000 hasNull: false bytesOnDisk: 55 min: bar max: zebra sum: 249980 + Column 3: count: 50000 hasNull: false bytesOnDisk: 5114 min: 0.8 max: 80.0 sum: 400102.8 + Column 4: count: 50000 hasNull: false bytesOnDisk: 498 min: 0 max: 6 sum: 32 + Column 5: count: 50000 hasNull: false bytesOnDisk: 64 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 Stripes: - Stripe: offset: 3 data: 5897 rows: 50000 tail: 113 index: 497 + Stripe: offset: 3 data: 5761 rows: 50000 tail: 112 index: 433 Stream: column 0 section ROW_INDEX start: 3 length 17 - Stream: column 1 section ROW_INDEX start: 20 length 83 - Stream: column 2 section ROW_INDEX start: 103 length 81 - Stream: column 3 section ROW_INDEX start: 184 length 111 - Stream: column 4 section ROW_INDEX start: 295 length 110 - Stream: column 5 section ROW_INDEX start: 405 length 95 - Stream: column 1 section DATA start: 500 length 45 - Stream: column 2 section DATA start: 545 length 41 - Stream: column 2 section LENGTH start: 586 length 8 - Stream: column 2 section DICTIONARY_DATA start: 594 length 23 - Stream: column 3 section DATA start: 617 length 5167 - Stream: column 4 section DATA start: 5784 length 524 - Stream: column 4 section SECONDARY start: 6308 length 18 - Stream: column 5 section DATA start: 6326 length 53 - Stream: column 5 section SECONDARY start: 6379 length 18 + Stream: column 1 section ROW_INDEX start: 20 length 73 + Stream: column 2 section ROW_INDEX start: 93 length 79 + Stream: column 3 section ROW_INDEX start: 172 length 85 + Stream: column 4 section ROW_INDEX start: 257 length 92 + Stream: column 5 section ROW_INDEX start: 349 length 87 + Stream: column 1 section DATA start: 436 length 30 + Stream: column 2 section DATA start: 466 length 24 + Stream: column 2 section LENGTH start: 490 length 8 + Stream: column 2 section DICTIONARY_DATA start: 498 length 23 + Stream: column 3 section DATA start: 521 length 5114 + Stream: column 4 section DATA start: 5635 length 480 + Stream: column 4 section SECONDARY start: 6115 length 18 + Stream: column 5 section DATA start: 6133 length 46 + Stream: column 5 section SECONDARY start: 6179 length 18 Encoding column 0: DIRECT Encoding column 1: DIRECT_V2 Encoding column 2: DICTIONARY_V2[6] @@ -125,37 +125,37 @@ Stripes: Entry 3: count: 10000 hasNull: false positions: Entry 4: count: 10000 hasNull: false positions: Row group indices for column 1: - Entry 0: count: 10000 hasNull: false min: 2 max: 100 sum: 999815 positions: 0,0,0 - Entry 1: count: 10000 hasNull: false min: 29 max: 100 sum: 999899 positions: 0,101,391 - Entry 2: count: 10000 hasNull: false min: 2 max: 100 sum: 999807 positions: 0,207,391 - Entry 3: count: 10000 hasNull: false min: 13 max: 100 sum: 999842 positions: 0,313,391 - Entry 4: count: 10000 hasNull: false min: 5 max: 100 sum: 999875 positions: 0,419,391 + Entry 0: 
count: 10000 hasNull: false min: 2 max: 100 sum: 999238 positions: 0,0,0 + Entry 1: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,107,262 + Entry 2: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,207,22 + Entry 3: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,302,294 + Entry 4: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,402,54 Row group indices for column 2: - Entry 0: count: 10000 hasNull: false min: bar max: zebra sum: 49996 positions: 0,0,0 - Entry 1: count: 10000 hasNull: false min: cat max: zebra sum: 49996 positions: 0,82,391 - Entry 2: count: 10000 hasNull: false min: eat max: zebra sum: 49996 positions: 0,168,391 - Entry 3: count: 10000 hasNull: false min: bar max: zebra sum: 49996 positions: 0,254,391 - Entry 4: count: 10000 hasNull: false min: dog max: zebra sum: 49996 positions: 0,340,391 + Entry 0: count: 10000 hasNull: false min: bar max: zebra sum: 49980 positions: 0,0,0 + Entry 1: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,83,262 + Entry 2: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,163,22 + Entry 3: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,239,294 + Entry 4: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,319,54 Row group indices for column 3: - Entry 0: count: 10000 hasNull: false min: 0.8 max: 80.0 sum: 80064.8 positions: 0,0 - Entry 1: count: 10000 hasNull: false min: 1.8 max: 8.0 sum: 79993.8 positions: 1002,2176 - Entry 2: count: 10000 hasNull: false min: 0.8 max: 8.0 sum: 79985.6 positions: 2053,256 - Entry 3: count: 10000 hasNull: false min: 8.0 max: 80.0 sum: 80072.0 positions: 3067,2432 - Entry 4: count: 10000 hasNull: false min: 0.8 max: 8.0 sum: 79986.6 positions: 4117,512 + Entry 0: count: 10000 hasNull: false min: 0.8 max: 80.0 sum: 80102.8 positions: 0,0 + Entry 1: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 1017,2176 + Entry 2: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 2057,256 + Entry 3: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 3045,2432 + Entry 4: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 4085,512 Row group indices for column 4: - Entry 0: count: 10000 hasNull: false min: 0 max: 2 sum: 3 positions: 0,0,0,0,0 - Entry 1: count: 10000 hasNull: false min: 0 max: 4 sum: 7 positions: 83,1808,0,76,272 - Entry 2: count: 10000 hasNull: false min: 0 max: 6 sum: 7 positions: 167,3616,0,156,32 - Entry 3: count: 10000 hasNull: false min: 0 max: 3 sum: 5 positions: 290,1328,0,232,304 - Entry 4: count: 10000 hasNull: false min: 0 max: 6 sum: 10 positions: 380,3136,0,312,64 + Entry 0: count: 10000 hasNull: false min: 0 max: 6 sum: 32 positions: 0,0,0,0,0 + Entry 1: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 84,1808,0,76,272 + Entry 2: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 160,3616,0,156,32 + Entry 3: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 274,1328,0,232,304 + Entry 4: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 350,3136,0,312,64 Row group indices for column 5: Entry 0: count: 10000 hasNull: false min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,0,0,0,0,0 - Entry 1: count: 10000 hasNull: false min: 1969-12-31 16:00:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:10.0 max UTC: 
1969-12-31 08:04:10.0 positions: 0,164,391,0,76,272 - Entry 2: count: 10000 hasNull: false min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,336,391,0,156,32 - Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:00:05.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:05.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,508,391,0,232,304 - Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:00:15.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:15.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,680,391,0,312,64 + Entry 1: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,194,262,0,76,272 + Entry 2: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,354,22,0,156,32 + Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,506,294,0,232,304 + Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,666,54,0,312,64 -File length: 6890 bytes +File length: 6685 bytes Padding length: 0 bytes Padding ratio: 0% ________________________________________________________________________________________________________________________ @@ -167,42 +167,42 @@ File Version: 0.12 with ORC_135 Rows: 50000 Compression: ZLIB Compression size: 4096 -Type: struct +Type: struct Stripe Statistics: Stripe 1: Column 0: count: 50000 hasNull: false - Column 1: count: 50000 hasNull: false bytesOnDisk: 45 min: 2 max: 100 sum: 4999238 - Column 2: count: 50000 hasNull: false bytesOnDisk: 72 min: bar max: zebra sum: 249980 - Column 3: count: 50000 hasNull: false bytesOnDisk: 5167 min: 0.8 max: 80.0 sum: 400102.80000000005 - Column 4: count: 50000 hasNull: false bytesOnDisk: 542 min: 0 max: 6 sum: 32 - Column 5: count: 50000 hasNull: false bytesOnDisk: 71 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 + Column 1: count: 50000 hasNull: false bytesOnDisk: 30 min: 2 max: 100 sum: 4999238 + Column 2: count: 50000 hasNull: false bytesOnDisk: 55 min: bar max: zebra sum: 249980 + Column 3: count: 50000 hasNull: false bytesOnDisk: 5114 min: 0.8 max: 80.0 sum: 400102.8 + Column 4: count: 50000 hasNull: false bytesOnDisk: 498 min: 0 max: 6 sum: 32 + Column 5: count: 50000 hasNull: false bytesOnDisk: 64 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 File Statistics: Column 0: count: 50000 hasNull: false - Column 1: count: 50000 hasNull: false bytesOnDisk: 45 min: 2 max: 100 sum: 4999238 - Column 2: count: 50000 hasNull: false bytesOnDisk: 72 min: bar max: zebra sum: 249980 - Column 3: count: 50000 hasNull: false bytesOnDisk: 5167 min: 0.8 max: 80.0 sum: 400102.80000000005 - Column 4: count: 50000 hasNull: false bytesOnDisk: 542 min: 0 max: 6 sum: 32 - Column 5: count: 50000 hasNull: false bytesOnDisk: 71 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 + Column 1: count: 50000 hasNull: false bytesOnDisk: 30 min: 2 max: 100 sum: 4999238 + Column 2: count: 50000 hasNull: false bytesOnDisk: 55 min: bar max: zebra sum: 249980 
+ Column 3: count: 50000 hasNull: false bytesOnDisk: 5114 min: 0.8 max: 80.0 sum: 400102.8 + Column 4: count: 50000 hasNull: false bytesOnDisk: 498 min: 0 max: 6 sum: 32 + Column 5: count: 50000 hasNull: false bytesOnDisk: 64 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 Stripes: - Stripe: offset: 3 data: 5897 rows: 50000 tail: 113 index: 497 + Stripe: offset: 3 data: 5761 rows: 50000 tail: 112 index: 433 Stream: column 0 section ROW_INDEX start: 3 length 17 - Stream: column 1 section ROW_INDEX start: 20 length 83 - Stream: column 2 section ROW_INDEX start: 103 length 81 - Stream: column 3 section ROW_INDEX start: 184 length 111 - Stream: column 4 section ROW_INDEX start: 295 length 110 - Stream: column 5 section ROW_INDEX start: 405 length 95 - Stream: column 1 section DATA start: 500 length 45 - Stream: column 2 section DATA start: 545 length 41 - Stream: column 2 section LENGTH start: 586 length 8 - Stream: column 2 section DICTIONARY_DATA start: 594 length 23 - Stream: column 3 section DATA start: 617 length 5167 - Stream: column 4 section DATA start: 5784 length 524 - Stream: column 4 section SECONDARY start: 6308 length 18 - Stream: column 5 section DATA start: 6326 length 53 - Stream: column 5 section SECONDARY start: 6379 length 18 + Stream: column 1 section ROW_INDEX start: 20 length 73 + Stream: column 2 section ROW_INDEX start: 93 length 79 + Stream: column 3 section ROW_INDEX start: 172 length 85 + Stream: column 4 section ROW_INDEX start: 257 length 92 + Stream: column 5 section ROW_INDEX start: 349 length 87 + Stream: column 1 section DATA start: 436 length 30 + Stream: column 2 section DATA start: 466 length 24 + Stream: column 2 section LENGTH start: 490 length 8 + Stream: column 2 section DICTIONARY_DATA start: 498 length 23 + Stream: column 3 section DATA start: 521 length 5114 + Stream: column 4 section DATA start: 5635 length 480 + Stream: column 4 section SECONDARY start: 6115 length 18 + Stream: column 5 section DATA start: 6133 length 46 + Stream: column 5 section SECONDARY start: 6179 length 18 Encoding column 0: DIRECT Encoding column 1: DIRECT_V2 Encoding column 2: DICTIONARY_V2[6] @@ -216,37 +216,37 @@ Stripes: Entry 3: count: 10000 hasNull: false positions: Entry 4: count: 10000 hasNull: false positions: Row group indices for column 1: - Entry 0: count: 10000 hasNull: false min: 2 max: 100 sum: 999815 positions: 0,0,0 - Entry 1: count: 10000 hasNull: false min: 29 max: 100 sum: 999899 positions: 0,101,391 - Entry 2: count: 10000 hasNull: false min: 2 max: 100 sum: 999807 positions: 0,207,391 - Entry 3: count: 10000 hasNull: false min: 13 max: 100 sum: 999842 positions: 0,313,391 - Entry 4: count: 10000 hasNull: false min: 5 max: 100 sum: 999875 positions: 0,419,391 + Entry 0: count: 10000 hasNull: false min: 2 max: 100 sum: 999238 positions: 0,0,0 + Entry 1: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,107,262 + Entry 2: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,207,22 + Entry 3: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,302,294 + Entry 4: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,402,54 Row group indices for column 2: - Entry 0: count: 10000 hasNull: false min: bar max: zebra sum: 49996 positions: 0,0,0 - Entry 1: count: 10000 hasNull: false min: cat max: zebra sum: 49996 positions: 0,82,391 - Entry 2: count: 10000 hasNull: false min: eat max: zebra sum: 49996 positions: 
0,168,391 - Entry 3: count: 10000 hasNull: false min: bar max: zebra sum: 49996 positions: 0,254,391 - Entry 4: count: 10000 hasNull: false min: dog max: zebra sum: 49996 positions: 0,340,391 + Entry 0: count: 10000 hasNull: false min: bar max: zebra sum: 49980 positions: 0,0,0 + Entry 1: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,83,262 + Entry 2: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,163,22 + Entry 3: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,239,294 + Entry 4: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,319,54 Row group indices for column 3: - Entry 0: count: 10000 hasNull: false min: 0.8 max: 80.0 sum: 80064.8 positions: 0,0 - Entry 1: count: 10000 hasNull: false min: 1.8 max: 8.0 sum: 79993.8 positions: 1002,2176 - Entry 2: count: 10000 hasNull: false min: 0.8 max: 8.0 sum: 79985.6 positions: 2053,256 - Entry 3: count: 10000 hasNull: false min: 8.0 max: 80.0 sum: 80072.0 positions: 3067,2432 - Entry 4: count: 10000 hasNull: false min: 0.8 max: 8.0 sum: 79986.6 positions: 4117,512 + Entry 0: count: 10000 hasNull: false min: 0.8 max: 80.0 sum: 80102.8 positions: 0,0 + Entry 1: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 1017,2176 + Entry 2: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 2057,256 + Entry 3: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 3045,2432 + Entry 4: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 4085,512 Row group indices for column 4: - Entry 0: count: 10000 hasNull: false min: 0 max: 2 sum: 3 positions: 0,0,0,0,0 - Entry 1: count: 10000 hasNull: false min: 0 max: 4 sum: 7 positions: 83,1808,0,76,272 - Entry 2: count: 10000 hasNull: false min: 0 max: 6 sum: 7 positions: 167,3616,0,156,32 - Entry 3: count: 10000 hasNull: false min: 0 max: 3 sum: 5 positions: 290,1328,0,232,304 - Entry 4: count: 10000 hasNull: false min: 0 max: 6 sum: 10 positions: 380,3136,0,312,64 + Entry 0: count: 10000 hasNull: false min: 0 max: 6 sum: 32 positions: 0,0,0,0,0 + Entry 1: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 84,1808,0,76,272 + Entry 2: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 160,3616,0,156,32 + Entry 3: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 274,1328,0,232,304 + Entry 4: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 350,3136,0,312,64 Row group indices for column 5: Entry 0: count: 10000 hasNull: false min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,0,0,0,0,0 - Entry 1: count: 10000 hasNull: false min: 1969-12-31 16:00:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,164,391,0,76,272 - Entry 2: count: 10000 hasNull: false min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,336,391,0,156,32 - Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:00:05.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:05.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,508,391,0,232,304 - Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:00:15.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:15.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,680,391,0,312,64 + Entry 1: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 
08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,194,262,0,76,272 + Entry 2: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,354,22,0,156,32 + Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,506,294,0,232,304 + Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,666,54,0,312,64 -File length: 6890 bytes +File length: 6685 bytes Padding length: 0 bytes Padding ratio: 0% ________________________________________________________________________________________________________________________ @@ -279,49 +279,49 @@ File Version: 0.12 with ORC_135 Rows: 100000 Compression: ZLIB Compression size: 4096 -Type: struct +Type: struct Stripe Statistics: Stripe 1: Column 0: count: 50000 hasNull: false - Column 1: count: 50000 hasNull: false bytesOnDisk: 45 min: 2 max: 100 sum: 4999238 - Column 2: count: 50000 hasNull: false bytesOnDisk: 72 min: bar max: zebra sum: 249980 - Column 3: count: 50000 hasNull: false bytesOnDisk: 5167 min: 0.8 max: 80.0 sum: 400102.80000000005 - Column 4: count: 50000 hasNull: false bytesOnDisk: 542 min: 0 max: 6 sum: 32 - Column 5: count: 50000 hasNull: false bytesOnDisk: 71 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 + Column 1: count: 50000 hasNull: false bytesOnDisk: 30 min: 2 max: 100 sum: 4999238 + Column 2: count: 50000 hasNull: false bytesOnDisk: 55 min: bar max: zebra sum: 249980 + Column 3: count: 50000 hasNull: false bytesOnDisk: 5114 min: 0.8 max: 80.0 sum: 400102.8 + Column 4: count: 50000 hasNull: false bytesOnDisk: 498 min: 0 max: 6 sum: 32 + Column 5: count: 50000 hasNull: false bytesOnDisk: 64 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 Stripe 2: Column 0: count: 50000 hasNull: false - Column 1: count: 50000 hasNull: false bytesOnDisk: 45 min: 2 max: 100 sum: 4999238 - Column 2: count: 50000 hasNull: false bytesOnDisk: 72 min: bar max: zebra sum: 249980 - Column 3: count: 50000 hasNull: false bytesOnDisk: 5167 min: 0.8 max: 80.0 sum: 400102.80000000005 - Column 4: count: 50000 hasNull: false bytesOnDisk: 542 min: 0 max: 6 sum: 32 - Column 5: count: 50000 hasNull: false bytesOnDisk: 71 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 + Column 1: count: 50000 hasNull: false bytesOnDisk: 30 min: 2 max: 100 sum: 4999238 + Column 2: count: 50000 hasNull: false bytesOnDisk: 55 min: bar max: zebra sum: 249980 + Column 3: count: 50000 hasNull: false bytesOnDisk: 5114 min: 0.8 max: 80.0 sum: 400102.8 + Column 4: count: 50000 hasNull: false bytesOnDisk: 498 min: 0 max: 6 sum: 32 + Column 5: count: 50000 hasNull: false bytesOnDisk: 64 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 File Statistics: Column 0: count: 100000 hasNull: false - Column 1: count: 100000 hasNull: false bytesOnDisk: 90 min: 2 max: 100 sum: 9998476 - Column 2: count: 100000 hasNull: false bytesOnDisk: 144 min: bar max: zebra sum: 499960 - Column 3: count: 100000 hasNull: false bytesOnDisk: 10334 min: 0.8 max: 80.0 sum: 800205.6000000001 - Column 4: count: 100000 hasNull: false 
bytesOnDisk: 1084 min: 0 max: 6 sum: 64 - Column 5: count: 100000 hasNull: false bytesOnDisk: 142 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 + Column 1: count: 100000 hasNull: false bytesOnDisk: 60 min: 2 max: 100 sum: 9998476 + Column 2: count: 100000 hasNull: false bytesOnDisk: 110 min: bar max: zebra sum: 499960 + Column 3: count: 100000 hasNull: false bytesOnDisk: 10228 min: 0.8 max: 80.0 sum: 800205.6 + Column 4: count: 100000 hasNull: false bytesOnDisk: 996 min: 0 max: 6 sum: 64 + Column 5: count: 100000 hasNull: false bytesOnDisk: 128 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 Stripes: - Stripe: offset: 3 data: 5897 rows: 50000 tail: 113 index: 497 + Stripe: offset: 3 data: 5761 rows: 50000 tail: 112 index: 433 Stream: column 0 section ROW_INDEX start: 3 length 17 - Stream: column 1 section ROW_INDEX start: 20 length 83 - Stream: column 2 section ROW_INDEX start: 103 length 81 - Stream: column 3 section ROW_INDEX start: 184 length 111 - Stream: column 4 section ROW_INDEX start: 295 length 110 - Stream: column 5 section ROW_INDEX start: 405 length 95 - Stream: column 1 section DATA start: 500 length 45 - Stream: column 2 section DATA start: 545 length 41 - Stream: column 2 section LENGTH start: 586 length 8 - Stream: column 2 section DICTIONARY_DATA start: 594 length 23 - Stream: column 3 section DATA start: 617 length 5167 - Stream: column 4 section DATA start: 5784 length 524 - Stream: column 4 section SECONDARY start: 6308 length 18 - Stream: column 5 section DATA start: 6326 length 53 - Stream: column 5 section SECONDARY start: 6379 length 18 + Stream: column 1 section ROW_INDEX start: 20 length 73 + Stream: column 2 section ROW_INDEX start: 93 length 79 + Stream: column 3 section ROW_INDEX start: 172 length 85 + Stream: column 4 section ROW_INDEX start: 257 length 92 + Stream: column 5 section ROW_INDEX start: 349 length 87 + Stream: column 1 section DATA start: 436 length 30 + Stream: column 2 section DATA start: 466 length 24 + Stream: column 2 section LENGTH start: 490 length 8 + Stream: column 2 section DICTIONARY_DATA start: 498 length 23 + Stream: column 3 section DATA start: 521 length 5114 + Stream: column 4 section DATA start: 5635 length 480 + Stream: column 4 section SECONDARY start: 6115 length 18 + Stream: column 5 section DATA start: 6133 length 46 + Stream: column 5 section SECONDARY start: 6179 length 18 Encoding column 0: DIRECT Encoding column 1: DIRECT_V2 Encoding column 2: DICTIONARY_V2[6] @@ -335,51 +335,51 @@ Stripes: Entry 3: count: 10000 hasNull: false positions: Entry 4: count: 10000 hasNull: false positions: Row group indices for column 1: - Entry 0: count: 10000 hasNull: false min: 2 max: 100 sum: 999815 positions: 0,0,0 - Entry 1: count: 10000 hasNull: false min: 29 max: 100 sum: 999899 positions: 0,101,391 - Entry 2: count: 10000 hasNull: false min: 2 max: 100 sum: 999807 positions: 0,207,391 - Entry 3: count: 10000 hasNull: false min: 13 max: 100 sum: 999842 positions: 0,313,391 - Entry 4: count: 10000 hasNull: false min: 5 max: 100 sum: 999875 positions: 0,419,391 + Entry 0: count: 10000 hasNull: false min: 2 max: 100 sum: 999238 positions: 0,0,0 + Entry 1: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,107,262 + Entry 2: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,207,22 + Entry 3: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 
0,302,294 + Entry 4: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,402,54 Row group indices for column 2: - Entry 0: count: 10000 hasNull: false min: bar max: zebra sum: 49996 positions: 0,0,0 - Entry 1: count: 10000 hasNull: false min: cat max: zebra sum: 49996 positions: 0,82,391 - Entry 2: count: 10000 hasNull: false min: eat max: zebra sum: 49996 positions: 0,168,391 - Entry 3: count: 10000 hasNull: false min: bar max: zebra sum: 49996 positions: 0,254,391 - Entry 4: count: 10000 hasNull: false min: dog max: zebra sum: 49996 positions: 0,340,391 + Entry 0: count: 10000 hasNull: false min: bar max: zebra sum: 49980 positions: 0,0,0 + Entry 1: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,83,262 + Entry 2: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,163,22 + Entry 3: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,239,294 + Entry 4: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,319,54 Row group indices for column 3: - Entry 0: count: 10000 hasNull: false min: 0.8 max: 80.0 sum: 80064.8 positions: 0,0 - Entry 1: count: 10000 hasNull: false min: 1.8 max: 8.0 sum: 79993.8 positions: 1002,2176 - Entry 2: count: 10000 hasNull: false min: 0.8 max: 8.0 sum: 79985.6 positions: 2053,256 - Entry 3: count: 10000 hasNull: false min: 8.0 max: 80.0 sum: 80072.0 positions: 3067,2432 - Entry 4: count: 10000 hasNull: false min: 0.8 max: 8.0 sum: 79986.6 positions: 4117,512 + Entry 0: count: 10000 hasNull: false min: 0.8 max: 80.0 sum: 80102.8 positions: 0,0 + Entry 1: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 1017,2176 + Entry 2: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 2057,256 + Entry 3: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 3045,2432 + Entry 4: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 4085,512 Row group indices for column 4: - Entry 0: count: 10000 hasNull: false min: 0 max: 2 sum: 3 positions: 0,0,0,0,0 - Entry 1: count: 10000 hasNull: false min: 0 max: 4 sum: 7 positions: 83,1808,0,76,272 - Entry 2: count: 10000 hasNull: false min: 0 max: 6 sum: 7 positions: 167,3616,0,156,32 - Entry 3: count: 10000 hasNull: false min: 0 max: 3 sum: 5 positions: 290,1328,0,232,304 - Entry 4: count: 10000 hasNull: false min: 0 max: 6 sum: 10 positions: 380,3136,0,312,64 + Entry 0: count: 10000 hasNull: false min: 0 max: 6 sum: 32 positions: 0,0,0,0,0 + Entry 1: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 84,1808,0,76,272 + Entry 2: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 160,3616,0,156,32 + Entry 3: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 274,1328,0,232,304 + Entry 4: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 350,3136,0,312,64 Row group indices for column 5: Entry 0: count: 10000 hasNull: false min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,0,0,0,0,0 - Entry 1: count: 10000 hasNull: false min: 1969-12-31 16:00:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,164,391,0,76,272 - Entry 2: count: 10000 hasNull: false min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,336,391,0,156,32 - Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:00:05.0 max: 1969-12-31 16:04:10.0 min UTC: 
1969-12-31 08:00:05.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,508,391,0,232,304 - Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:00:15.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:15.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,680,391,0,312,64 - Stripe: offset: 6510 data: 5897 rows: 50000 tail: 113 index: 497 - Stream: column 0 section ROW_INDEX start: 6510 length 17 - Stream: column 1 section ROW_INDEX start: 6527 length 83 - Stream: column 2 section ROW_INDEX start: 6610 length 81 - Stream: column 3 section ROW_INDEX start: 6691 length 111 - Stream: column 4 section ROW_INDEX start: 6802 length 110 - Stream: column 5 section ROW_INDEX start: 6912 length 95 - Stream: column 1 section DATA start: 7007 length 45 - Stream: column 2 section DATA start: 7052 length 41 - Stream: column 2 section LENGTH start: 7093 length 8 - Stream: column 2 section DICTIONARY_DATA start: 7101 length 23 - Stream: column 3 section DATA start: 7124 length 5167 - Stream: column 4 section DATA start: 12291 length 524 - Stream: column 4 section SECONDARY start: 12815 length 18 - Stream: column 5 section DATA start: 12833 length 53 - Stream: column 5 section SECONDARY start: 12886 length 18 + Entry 1: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,194,262,0,76,272 + Entry 2: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,354,22,0,156,32 + Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,506,294,0,232,304 + Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,666,54,0,312,64 + Stripe: offset: 6309 data: 5761 rows: 50000 tail: 112 index: 433 + Stream: column 0 section ROW_INDEX start: 6309 length 17 + Stream: column 1 section ROW_INDEX start: 6326 length 73 + Stream: column 2 section ROW_INDEX start: 6399 length 79 + Stream: column 3 section ROW_INDEX start: 6478 length 85 + Stream: column 4 section ROW_INDEX start: 6563 length 92 + Stream: column 5 section ROW_INDEX start: 6655 length 87 + Stream: column 1 section DATA start: 6742 length 30 + Stream: column 2 section DATA start: 6772 length 24 + Stream: column 2 section LENGTH start: 6796 length 8 + Stream: column 2 section DICTIONARY_DATA start: 6804 length 23 + Stream: column 3 section DATA start: 6827 length 5114 + Stream: column 4 section DATA start: 11941 length 480 + Stream: column 4 section SECONDARY start: 12421 length 18 + Stream: column 5 section DATA start: 12439 length 46 + Stream: column 5 section SECONDARY start: 12485 length 18 Encoding column 0: DIRECT Encoding column 1: DIRECT_V2 Encoding column 2: DICTIONARY_V2[6] @@ -393,37 +393,37 @@ Stripes: Entry 3: count: 10000 hasNull: false positions: Entry 4: count: 10000 hasNull: false positions: Row group indices for column 1: - Entry 0: count: 10000 hasNull: false min: 2 max: 100 sum: 999815 positions: 0,0,0 - Entry 1: count: 10000 hasNull: false min: 29 max: 100 sum: 999899 positions: 0,101,391 - Entry 2: count: 10000 hasNull: false min: 2 max: 100 sum: 999807 positions: 0,207,391 - Entry 3: count: 10000 hasNull: false min: 13 max: 100 sum: 999842 positions: 0,313,391 - Entry 4: count: 10000 hasNull: false min: 5 max: 100 sum: 999875 
positions: 0,419,391 + Entry 0: count: 10000 hasNull: false min: 2 max: 100 sum: 999238 positions: 0,0,0 + Entry 1: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,107,262 + Entry 2: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,207,22 + Entry 3: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,302,294 + Entry 4: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,402,54 Row group indices for column 2: - Entry 0: count: 10000 hasNull: false min: bar max: zebra sum: 49996 positions: 0,0,0 - Entry 1: count: 10000 hasNull: false min: cat max: zebra sum: 49996 positions: 0,82,391 - Entry 2: count: 10000 hasNull: false min: eat max: zebra sum: 49996 positions: 0,168,391 - Entry 3: count: 10000 hasNull: false min: bar max: zebra sum: 49996 positions: 0,254,391 - Entry 4: count: 10000 hasNull: false min: dog max: zebra sum: 49996 positions: 0,340,391 + Entry 0: count: 10000 hasNull: false min: bar max: zebra sum: 49980 positions: 0,0,0 + Entry 1: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,83,262 + Entry 2: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,163,22 + Entry 3: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,239,294 + Entry 4: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,319,54 Row group indices for column 3: - Entry 0: count: 10000 hasNull: false min: 0.8 max: 80.0 sum: 80064.8 positions: 0,0 - Entry 1: count: 10000 hasNull: false min: 1.8 max: 8.0 sum: 79993.8 positions: 1002,2176 - Entry 2: count: 10000 hasNull: false min: 0.8 max: 8.0 sum: 79985.6 positions: 2053,256 - Entry 3: count: 10000 hasNull: false min: 8.0 max: 80.0 sum: 80072.0 positions: 3067,2432 - Entry 4: count: 10000 hasNull: false min: 0.8 max: 8.0 sum: 79986.6 positions: 4117,512 + Entry 0: count: 10000 hasNull: false min: 0.8 max: 80.0 sum: 80102.8 positions: 0,0 + Entry 1: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 1017,2176 + Entry 2: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 2057,256 + Entry 3: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 3045,2432 + Entry 4: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 4085,512 Row group indices for column 4: - Entry 0: count: 10000 hasNull: false min: 0 max: 2 sum: 3 positions: 0,0,0,0,0 - Entry 1: count: 10000 hasNull: false min: 0 max: 4 sum: 7 positions: 83,1808,0,76,272 - Entry 2: count: 10000 hasNull: false min: 0 max: 6 sum: 7 positions: 167,3616,0,156,32 - Entry 3: count: 10000 hasNull: false min: 0 max: 3 sum: 5 positions: 290,1328,0,232,304 - Entry 4: count: 10000 hasNull: false min: 0 max: 6 sum: 10 positions: 380,3136,0,312,64 + Entry 0: count: 10000 hasNull: false min: 0 max: 6 sum: 32 positions: 0,0,0,0,0 + Entry 1: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 84,1808,0,76,272 + Entry 2: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 160,3616,0,156,32 + Entry 3: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 274,1328,0,232,304 + Entry 4: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 350,3136,0,312,64 Row group indices for column 5: Entry 0: count: 10000 hasNull: false min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,0,0,0,0,0 - Entry 1: count: 10000 hasNull: false min: 1969-12-31 16:00:10.0 max: 1969-12-31 16:04:10.0 min UTC: 
1969-12-31 08:00:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,164,391,0,76,272 - Entry 2: count: 10000 hasNull: false min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,336,391,0,156,32 - Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:00:05.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:05.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,508,391,0,232,304 - Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:00:15.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:15.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,680,391,0,312,64 + Entry 1: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,194,262,0,76,272 + Entry 2: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,354,22,0,156,32 + Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,506,294,0,232,304 + Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,666,54,0,312,64 -File length: 13411 bytes +File length: 13004 bytes Padding length: 0 bytes Padding ratio: 0% ________________________________________________________________________________________________________________________ diff --git a/ql/src/test/results/clientpositive/llap/orc_merge5.q.out b/ql/src/test/results/clientpositive/llap/orc_merge5.q.out index 57482f5..d49c72a 100644 --- a/ql/src/test/results/clientpositive/llap/orc_merge5.q.out +++ b/ql/src/test/results/clientpositive/llap/orc_merge5.q.out @@ -1,16 +1,16 @@ -PREHOOK: query: create table orc_merge5_n5 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +PREHOOK: query: create table orc_merge5_n5 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@orc_merge5_n5 -POSTHOOK: query: create table orc_merge5_n5 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +POSTHOOK: query: create table orc_merge5_n5 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orc_merge5_n5 -PREHOOK: query: create table orc_merge5b_n0 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +PREHOOK: query: create table orc_merge5b_n0 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@orc_merge5b_n0 -POSTHOOK: query: create table orc_merge5b_n0 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +POSTHOOK: query: create table orc_merge5b_n0 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orc_merge5b_n0 @@ -50,7 +50,7 @@ STAGE PLANS: predicate: (userid <= 13L) (type: boolean) Statistics: Num rows: 1 Data size: 352 
Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(10,0)), ts (type: timestamp) + expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp) outputColumnNames: _col0, _col1, _col2, _col3, _col4 Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -62,7 +62,7 @@ STAGE PLANS: serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.orc_merge5b_n0 Select Operator - expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(10,0)), _col4 (type: timestamp) + expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(38,0)), _col4 (type: timestamp) outputColumnNames: userid, string1, subtype, decimal1, ts Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE Group By Operator @@ -73,7 +73,7 @@ STAGE PLANS: Reduce Output Operator sort order: Statistics: Num rows: 1 Data size: 2696 Basic stats: COMPLETE Column stats: NONE - value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct) + value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct) Execution mode: llap LLAP IO: all inputs Reducer 2 @@ -110,7 +110,7 @@ STAGE PLANS: Basic Stats Work: Column Stats Desc: Columns: userid, string1, subtype, decimal1, ts - Column Types: bigint, string, double, decimal(10,0), timestamp + Column Types: bigint, string, double, decimal(38,0), timestamp Table: default.orc_merge5b_n0 PREHOOK: query: insert overwrite table orc_merge5b_n0 select userid,string1,subtype,decimal1,ts from orc_merge5_n5 where userid<=13 @@ -121,7 +121,7 @@ POSTHOOK: query: insert overwrite table orc_merge5b_n0 select userid,string1,sub POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n5 POSTHOOK: Output: default@orc_merge5b_n0 -POSTHOOK: Lineage: orc_merge5b_n0.decimal1 SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5b_n0.decimal1 SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5b_n0.string1 SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5b_n0.subtype SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5b_n0.ts SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -180,7 +180,7 @@ STAGE PLANS: predicate: (userid <= 13L) (type: boolean) Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(10,0)), ts (type: timestamp) + expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp) outputColumnNames: _col0, _col1, _col2, _col3, _col4 Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -192,7 +192,7 @@ STAGE PLANS: serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.orc_merge5b_n0 Select Operator - expressions: _col0 
(type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(10,0)), _col4 (type: timestamp) + expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(38,0)), _col4 (type: timestamp) outputColumnNames: userid, string1, subtype, decimal1, ts Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE Group By Operator @@ -203,7 +203,7 @@ STAGE PLANS: Reduce Output Operator sort order: Statistics: Num rows: 1 Data size: 2696 Basic stats: COMPLETE Column stats: NONE - value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct) + value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct) Execution mode: llap LLAP IO: all inputs Reducer 2 @@ -249,7 +249,7 @@ STAGE PLANS: Basic Stats Work: Column Stats Desc: Columns: userid, string1, subtype, decimal1, ts - Column Types: bigint, string, double, decimal(10,0), timestamp + Column Types: bigint, string, double, decimal(38,0), timestamp Table: default.orc_merge5b_n0 Stage: Stage-4 @@ -288,7 +288,7 @@ POSTHOOK: query: insert overwrite table orc_merge5b_n0 select userid,string1,sub POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n5 POSTHOOK: Output: default@orc_merge5b_n0 -POSTHOOK: Lineage: orc_merge5b_n0.decimal1 SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5b_n0.decimal1 SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5b_n0.string1 SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5b_n0.subtype SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5b_n0.ts SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -322,7 +322,7 @@ POSTHOOK: query: insert overwrite table orc_merge5b_n0 select userid,string1,sub POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n5 POSTHOOK: Output: default@orc_merge5b_n0 -POSTHOOK: Lineage: orc_merge5b_n0.decimal1 SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5b_n0.decimal1 SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5b_n0.string1 SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5b_n0.subtype SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5b_n0.ts SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:ts, type:timestamp, comment:null), ] diff --git a/ql/src/test/results/clientpositive/llap/orc_merge6.q.out b/ql/src/test/results/clientpositive/llap/orc_merge6.q.out index 3a25787..1359111 100644 --- a/ql/src/test/results/clientpositive/llap/orc_merge6.q.out +++ b/ql/src/test/results/clientpositive/llap/orc_merge6.q.out @@ -1,16 +1,16 @@ -PREHOOK: query: create table orc_merge5_n4 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +PREHOOK: query: create table orc_merge5_n4 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc PREHOOK: type: CREATETABLE 
PREHOOK: Output: database:default PREHOOK: Output: default@orc_merge5_n4 -POSTHOOK: query: create table orc_merge5_n4 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +POSTHOOK: query: create table orc_merge5_n4 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orc_merge5_n4 -PREHOOK: query: create table orc_merge5a_n1 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) partitioned by (year string, hour int) stored as orc +PREHOOK: query: create table orc_merge5a_n1 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) partitioned by (year string, hour int) stored as orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@orc_merge5a_n1 -POSTHOOK: query: create table orc_merge5a_n1 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) partitioned by (year string, hour int) stored as orc +POSTHOOK: query: create table orc_merge5a_n1 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) partitioned by (year string, hour int) stored as orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orc_merge5a_n1 @@ -50,7 +50,7 @@ STAGE PLANS: predicate: (userid <= 13L) (type: boolean) Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(10,0)), ts (type: timestamp) + expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp) outputColumnNames: _col0, _col1, _col2, _col3, _col4 Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -62,7 +62,7 @@ STAGE PLANS: serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.orc_merge5a_n1 Select Operator - expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(10,0)), _col4 (type: timestamp), '2000' (type: string), UDFToInteger('24') (type: int) + expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(38,0)), _col4 (type: timestamp), '2000' (type: string), UDFToInteger('24') (type: int) outputColumnNames: userid, string1, subtype, decimal1, ts, year, hour Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE Group By Operator @@ -76,7 +76,7 @@ STAGE PLANS: sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: int) Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE - value expressions: _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct) + value expressions: _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct) Execution mode: llap LLAP IO: all inputs Reducer 2 @@ -89,7 +89,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct), _col0 (type: string), _col1 (type: int) + expressions: _col2 
(type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct), _col0 (type: string), _col1 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -121,7 +121,7 @@ STAGE PLANS: Basic Stats Work: Column Stats Desc: Columns: userid, string1, subtype, decimal1, ts - Column Types: bigint, string, double, decimal(10,0), timestamp + Column Types: bigint, string, double, decimal(38,0), timestamp Table: default.orc_merge5a_n1 PREHOOK: query: insert overwrite table orc_merge5a_n1 partition (year="2000",hour=24) select userid,string1,subtype,decimal1,ts from orc_merge5_n4 where userid<=13 @@ -132,7 +132,7 @@ POSTHOOK: query: insert overwrite table orc_merge5a_n1 partition (year="2000",ho POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n4 POSTHOOK: Output: default@orc_merge5a_n1@year=2000/hour=24 -POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).string1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).subtype SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).ts SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -145,7 +145,7 @@ POSTHOOK: query: insert overwrite table orc_merge5a_n1 partition (year="2001",ho POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n4 POSTHOOK: Output: default@orc_merge5a_n1@year=2001/hour=24 -POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).string1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).subtype SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).ts SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -233,7 +233,7 @@ STAGE PLANS: predicate: (userid <= 13L) (type: boolean) Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(10,0)), ts (type: timestamp) + expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp) outputColumnNames: _col0, _col1, _col2, _col3, _col4 Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -245,7 +245,7 @@ STAGE PLANS: serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: 
default.orc_merge5a_n1 Select Operator - expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(10,0)), _col4 (type: timestamp), '2000' (type: string), UDFToInteger('24') (type: int) + expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(38,0)), _col4 (type: timestamp), '2000' (type: string), UDFToInteger('24') (type: int) outputColumnNames: userid, string1, subtype, decimal1, ts, year, hour Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE Group By Operator @@ -259,7 +259,7 @@ STAGE PLANS: sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: int) Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE - value expressions: _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct) + value expressions: _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct) Execution mode: llap LLAP IO: all inputs Reducer 2 @@ -272,7 +272,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct), _col0 (type: string), _col1 (type: int) + expressions: _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct), _col0 (type: string), _col1 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -313,7 +313,7 @@ STAGE PLANS: Basic Stats Work: Column Stats Desc: Columns: userid, string1, subtype, decimal1, ts - Column Types: bigint, string, double, decimal(10,0), timestamp + Column Types: bigint, string, double, decimal(38,0), timestamp Table: default.orc_merge5a_n1 Stage: Stage-4 @@ -352,7 +352,7 @@ POSTHOOK: query: insert overwrite table orc_merge5a_n1 partition (year="2000",ho POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n4 POSTHOOK: Output: default@orc_merge5a_n1@year=2000/hour=24 -POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).string1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).subtype SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).ts SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -365,7 +365,7 @@ POSTHOOK: query: insert overwrite table orc_merge5a_n1 partition (year="2001",ho POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n4 POSTHOOK: Output: default@orc_merge5a_n1@year=2001/hour=24 -POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: 
orc_merge5a_n1 PARTITION(year=2001,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).string1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).subtype SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).ts SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -428,7 +428,7 @@ POSTHOOK: query: insert overwrite table orc_merge5a_n1 partition (year="2000",ho POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n4 POSTHOOK: Output: default@orc_merge5a_n1@year=2000/hour=24 -POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).string1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).subtype SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).ts SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -441,7 +441,7 @@ POSTHOOK: query: insert overwrite table orc_merge5a_n1 partition (year="2001",ho POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n4 POSTHOOK: Output: default@orc_merge5a_n1@year=2001/hour=24 -POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).string1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).subtype SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).ts SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:ts, type:timestamp, comment:null), ] diff --git a/ql/src/test/results/clientpositive/llap/orc_merge7.q.out b/ql/src/test/results/clientpositive/llap/orc_merge7.q.out index c1e4fc6..273a5be 100644 --- a/ql/src/test/results/clientpositive/llap/orc_merge7.q.out +++ b/ql/src/test/results/clientpositive/llap/orc_merge7.q.out @@ -1,16 +1,16 @@ -PREHOOK: query: create table orc_merge5_n2 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +PREHOOK: query: create table orc_merge5_n2 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@orc_merge5_n2 -POSTHOOK: query: create table orc_merge5_n2 (userid bigint, string1 string, subtype double, decimal1 decimal, ts 
timestamp) stored as orc +POSTHOOK: query: create table orc_merge5_n2 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orc_merge5_n2 -PREHOOK: query: create table orc_merge5a_n0 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) partitioned by (st double) stored as orc +PREHOOK: query: create table orc_merge5a_n0 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) partitioned by (st double) stored as orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@orc_merge5a_n0 -POSTHOOK: query: create table orc_merge5a_n0 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) partitioned by (st double) stored as orc +POSTHOOK: query: create table orc_merge5a_n0 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) partitioned by (st double) stored as orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orc_merge5a_n0 @@ -46,7 +46,7 @@ STAGE PLANS: alias: orc_merge5_n2 Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(10,0)), ts (type: timestamp), subtype (type: double) + expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp), subtype (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -58,7 +58,7 @@ STAGE PLANS: serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.orc_merge5a_n0 Select Operator - expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(10,0)), _col4 (type: timestamp), _col5 (type: double) + expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(38,0)), _col4 (type: timestamp), _col5 (type: double) outputColumnNames: userid, string1, subtype, decimal1, ts, st Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE Group By Operator @@ -72,7 +72,7 @@ STAGE PLANS: sort order: + Map-reduce partition columns: _col0 (type: double) Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE - value expressions: _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct) + value expressions: _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct) Execution mode: llap LLAP IO: all inputs Reducer 2 @@ -85,7 +85,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col0 (type: double) + expressions: _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col0 (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -116,7 +116,7 @@ STAGE PLANS: Basic 
Stats Work: Column Stats Desc: Columns: userid, string1, subtype, decimal1, ts - Column Types: bigint, string, double, decimal(10,0), timestamp + Column Types: bigint, string, double, decimal(38,0), timestamp Table: default.orc_merge5a_n0 PREHOOK: query: insert overwrite table orc_merge5a_n0 partition (st) select userid,string1,subtype,decimal1,ts,subtype from orc_merge5_n2 @@ -130,22 +130,22 @@ POSTHOOK: Output: default@orc_merge5a_n0@st=0.8 POSTHOOK: Output: default@orc_merge5a_n0@st=1.8 POSTHOOK: Output: default@orc_merge5a_n0@st=8.0 POSTHOOK: Output: default@orc_merge5a_n0@st=80.0 -POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: 
Lineage: orc_merge5a_n0 PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -161,22 +161,22 @@ POSTHOOK: Output: default@orc_merge5a_n0@st=0.8 POSTHOOK: Output: default@orc_merge5a_n0@st=1.8 POSTHOOK: Output: default@orc_merge5a_n0@st=8.0 POSTHOOK: Output: default@orc_merge5a_n0@st=80.0 -POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).userid SIMPLE 
[(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -263,7 +263,7 @@ STAGE PLANS: alias: orc_merge5_n2 Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(10,0)), ts (type: timestamp), subtype (type: double) + expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp), subtype (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -275,7 +275,7 @@ STAGE PLANS: serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.orc_merge5a_n0 Select Operator - expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(10,0)), _col4 (type: timestamp), _col5 (type: double) + expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(38,0)), _col4 (type: timestamp), _col5 (type: double) outputColumnNames: userid, string1, subtype, decimal1, ts, st Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE Group By Operator @@ -289,7 +289,7 @@ STAGE PLANS: sort order: + Map-reduce partition columns: _col0 (type: double) Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE - value expressions: _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct) + value expressions: _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct) Execution mode: llap LLAP IO: all inputs Reducer 2 @@ -302,7 +302,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col0 (type: double) + expressions: _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col0 (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -342,7 +342,7 @@ STAGE PLANS: Basic Stats Work: Column Stats Desc: Columns: userid, string1, subtype, decimal1, ts - Column Types: bigint, string, double, decimal(10,0), timestamp + Column Types: bigint, string, double, decimal(38,0), timestamp Table: default.orc_merge5a_n0 Stage: Stage-4 @@ -384,22 +384,22 @@ 
POSTHOOK: Output: default@orc_merge5a_n0@st=0.8 POSTHOOK: Output: default@orc_merge5a_n0@st=1.8 POSTHOOK: Output: default@orc_merge5a_n0@st=8.0 POSTHOOK: Output: default@orc_merge5a_n0@st=80.0 -POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).subtype SIMPLE 
[(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -415,22 +415,22 @@ POSTHOOK: Output: default@orc_merge5a_n0@st=0.8 POSTHOOK: Output: default@orc_merge5a_n0@st=1.8 POSTHOOK: Output: default@orc_merge5a_n0@st=8.0 POSTHOOK: Output: default@orc_merge5a_n0@st=80.0 -POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), 
comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -499,22 +499,22 @@ POSTHOOK: Output: default@orc_merge5a_n0@st=0.8 POSTHOOK: Output: default@orc_merge5a_n0@st=1.8 POSTHOOK: Output: default@orc_merge5a_n0@st=8.0 POSTHOOK: Output: default@orc_merge5a_n0@st=80.0 -POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).decimal1 SIMPLE 
[(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -530,22 +530,22 @@ POSTHOOK: Output: default@orc_merge5a_n0@st=0.8 POSTHOOK: Output: default@orc_merge5a_n0@st=1.8 POSTHOOK: Output: default@orc_merge5a_n0@st=8.0 POSTHOOK: Output: default@orc_merge5a_n0@st=80.0 -POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, 
comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ] diff --git a/ql/src/test/results/clientpositive/llap/orc_merge_incompat1.q.out b/ql/src/test/results/clientpositive/llap/orc_merge_incompat1.q.out index 307e730..676af08 100644 --- a/ql/src/test/results/clientpositive/llap/orc_merge_incompat1.q.out +++ b/ql/src/test/results/clientpositive/llap/orc_merge_incompat1.q.out @@ -1,16 +1,16 @@ -PREHOOK: query: create table orc_merge5_n3 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +PREHOOK: query: create table orc_merge5_n3 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@orc_merge5_n3 -POSTHOOK: query: create table orc_merge5_n3 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +POSTHOOK: query: create table orc_merge5_n3 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orc_merge5_n3 -PREHOOK: query: create table orc_merge5b (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +PREHOOK: query: create table orc_merge5b (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@orc_merge5b -POSTHOOK: query: create table orc_merge5b (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +POSTHOOK: query: create table orc_merge5b (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orc_merge5b @@ -49,7 +49,7 @@ STAGE PLANS: predicate: (userid <= 13L) (type: boolean) Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(10,0)), ts (type: timestamp) + expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp) outputColumnNames: _col0, _col1, _col2, _col3, _col4 Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -61,7 +61,7 @@ STAGE PLANS: serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.orc_merge5b Select Operator - expressions: 
_col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(10,0)), _col4 (type: timestamp) + expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(38,0)), _col4 (type: timestamp) outputColumnNames: userid, string1, subtype, decimal1, ts Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE Group By Operator @@ -72,7 +72,7 @@ STAGE PLANS: Reduce Output Operator sort order: Statistics: Num rows: 1 Data size: 2696 Basic stats: COMPLETE Column stats: NONE - value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct) + value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct) Execution mode: llap LLAP IO: all inputs Reducer 2 @@ -109,7 +109,7 @@ STAGE PLANS: Basic Stats Work: Column Stats Desc: Columns: userid, string1, subtype, decimal1, ts - Column Types: bigint, string, double, decimal(10,0), timestamp + Column Types: bigint, string, double, decimal(38,0), timestamp Table: default.orc_merge5b PREHOOK: query: insert overwrite table orc_merge5b select userid,string1,subtype,decimal1,ts from orc_merge5_n3 where userid<=13 @@ -120,7 +120,7 @@ POSTHOOK: query: insert overwrite table orc_merge5b select userid,string1,subtyp POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n3 POSTHOOK: Output: default@orc_merge5b -POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5b.string1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5b.subtype SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5b.ts SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -133,7 +133,7 @@ POSTHOOK: query: insert into table orc_merge5b select userid,string1,subtype,dec POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n3 POSTHOOK: Output: default@orc_merge5b -POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5b.string1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5b.subtype SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5b.ts SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -146,7 +146,7 @@ POSTHOOK: query: insert into table orc_merge5b select userid,string1,subtype,dec POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n3 POSTHOOK: Output: default@orc_merge5b -POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5b.string1 SIMPLE 
[(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5b.subtype SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5b.ts SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -159,7 +159,7 @@ POSTHOOK: query: insert into table orc_merge5b select userid,string1,subtype,dec POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n3 POSTHOOK: Output: default@orc_merge5b -POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5b.string1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5b.subtype SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5b.ts SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -172,7 +172,7 @@ POSTHOOK: query: insert into table orc_merge5b select userid,string1,subtype,dec POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n3 POSTHOOK: Output: default@orc_merge5b -POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5b.string1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5b.subtype SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5b.ts SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -185,7 +185,7 @@ POSTHOOK: query: insert into table orc_merge5b select userid,string1,subtype,dec POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n3 POSTHOOK: Output: default@orc_merge5b -POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5b.string1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5b.subtype SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5b.ts SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:ts, type:timestamp, comment:null), ] diff --git a/ql/src/test/results/clientpositive/llap/orc_merge_incompat2.q.out b/ql/src/test/results/clientpositive/llap/orc_merge_incompat2.q.out index 7be4ffa..00d0a14 100644 --- a/ql/src/test/results/clientpositive/llap/orc_merge_incompat2.q.out +++ b/ql/src/test/results/clientpositive/llap/orc_merge_incompat2.q.out @@ -1,16 +1,16 @@ -PREHOOK: query: create table orc_merge5 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +PREHOOK: query: create table orc_merge5 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts 
timestamp) stored as orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@orc_merge5 -POSTHOOK: query: create table orc_merge5 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +POSTHOOK: query: create table orc_merge5 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orc_merge5 -PREHOOK: query: create table orc_merge5a (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) partitioned by (st double) stored as orc +PREHOOK: query: create table orc_merge5a (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) partitioned by (st double) stored as orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@orc_merge5a -POSTHOOK: query: create table orc_merge5a (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) partitioned by (st double) stored as orc +POSTHOOK: query: create table orc_merge5a (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) partitioned by (st double) stored as orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orc_merge5a @@ -46,7 +46,7 @@ STAGE PLANS: alias: orc_merge5 Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(10,0)), ts (type: timestamp), subtype (type: double) + expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp), subtype (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -58,7 +58,7 @@ STAGE PLANS: serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.orc_merge5a Select Operator - expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(10,0)), _col4 (type: timestamp), _col5 (type: double) + expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(38,0)), _col4 (type: timestamp), _col5 (type: double) outputColumnNames: userid, string1, subtype, decimal1, ts, st Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE Group By Operator @@ -72,7 +72,7 @@ STAGE PLANS: sort order: + Map-reduce partition columns: _col0 (type: double) Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE - value expressions: _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct) + value expressions: _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct) Execution mode: llap LLAP IO: all inputs Reducer 2 @@ -85,7 +85,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col0 (type: double) + expressions: _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col0 (type: 
double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 Statistics: Num rows: 1 Data size: 352 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -116,7 +116,7 @@ STAGE PLANS: Basic Stats Work: Column Stats Desc: Columns: userid, string1, subtype, decimal1, ts - Column Types: bigint, string, double, decimal(10,0), timestamp + Column Types: bigint, string, double, decimal(38,0), timestamp Table: default.orc_merge5a PREHOOK: query: insert overwrite table orc_merge5a partition (st) select userid,string1,subtype,decimal1,ts,subtype from orc_merge5 order by userid @@ -130,22 +130,22 @@ POSTHOOK: Output: default@orc_merge5a@st=0.8 POSTHOOK: Output: default@orc_merge5a@st=1.8 POSTHOOK: Output: default@orc_merge5a@st=8.0 POSTHOOK: Output: default@orc_merge5a@st=80.0 -POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, 
type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -161,22 +161,22 @@ POSTHOOK: Output: default@orc_merge5a@st=0.8 POSTHOOK: Output: default@orc_merge5a@st=1.8 POSTHOOK: Output: default@orc_merge5a@st=8.0 POSTHOOK: Output: default@orc_merge5a@st=80.0 -POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).decimal1 SIMPLE 
[(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -192,22 +192,22 @@ POSTHOOK: Output: default@orc_merge5a@st=0.8 POSTHOOK: Output: default@orc_merge5a@st=1.8 POSTHOOK: Output: default@orc_merge5a@st=8.0 POSTHOOK: Output: default@orc_merge5a@st=80.0 -POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a 
PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -223,22 +223,22 @@ POSTHOOK: Output: default@orc_merge5a@st=0.8 POSTHOOK: Output: default@orc_merge5a@st=1.8 POSTHOOK: Output: default@orc_merge5a@st=8.0 POSTHOOK: Output: default@orc_merge5a@st=80.0 -POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: 
orc_merge5a PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] diff --git a/ql/src/test/results/clientpositive/llap/orc_ppd_basic.q.out b/ql/src/test/results/clientpositive/llap/orc_ppd_basic.q.out index ad8aef0..a2224a1 100644 --- a/ql/src/test/results/clientpositive/llap/orc_ppd_basic.q.out +++ b/ql/src/test/results/clientpositive/llap/orc_ppd_basic.q.out @@ -203,7 +203,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n2 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 16676 + HDFS_BYTES_READ: 16671 HDFS_BYTES_WRITTEN: 104 HDFS_READ_OPS: 7 HDFS_LARGE_READ_OPS: 0 @@ -1705,7 +1705,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n2 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 4912 + HDFS_BYTES_READ: 4913 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 4 HDFS_LARGE_READ_OPS: 0 @@ -1728,7 +1728,7 @@ Stage-1 LLAP IO COUNTERS: ALLOCATED_BYTES: 524288 ALLOCATED_USED_BYTES: 8527 CACHE_HIT_BYTES: 24 - CACHE_MISS_BYTES: 4912 + CACHE_MISS_BYTES: 4913 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 3 NUM_VECTOR_BATCHES: 3 @@ -1767,7 +1767,7 @@ Stage-1 HIVE COUNTERS: Stage-1 LLAP IO COUNTERS: ALLOCATED_BYTES: 262144 ALLOCATED_USED_BYTES: 2376 - CACHE_HIT_BYTES: 4936 + CACHE_HIT_BYTES: 4937 CACHE_MISS_BYTES: 1751 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 @@ -1805,7 +1805,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 2 RECORDS_OUT_OPERATOR_TS_0: 2100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 4936 + CACHE_HIT_BYTES: 4937 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 3 @@ -1843,7 +1843,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 2 RECORDS_OUT_OPERATOR_TS_0: 100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 6687 + CACHE_HIT_BYTES: 6688 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 @@ -1882,7 +1882,7 @@ Stage-1 HIVE COUNTERS: Stage-1 LLAP IO COUNTERS: ALLOCATED_BYTES: 2359296 ALLOCATED_USED_BYTES: 44166 - CACHE_HIT_BYTES: 30884 + CACHE_HIT_BYTES: 30672 CACHE_MISS_BYTES: 10129 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 diff --git a/ql/src/test/results/clientpositive/llap/orc_ppd_schema_evol_3a.q.out b/ql/src/test/results/clientpositive/llap/orc_ppd_schema_evol_3a.q.out index 433c2c3..52c362f 100644 --- a/ql/src/test/results/clientpositive/llap/orc_ppd_schema_evol_3a.q.out +++ b/ql/src/test/results/clientpositive/llap/orc_ppd_schema_evol_3a.q.out @@ -203,7 +203,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 17011 + HDFS_BYTES_READ: 17006 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 7 HDFS_LARGE_READ_OPS: 0 @@ -1071,7 +1071,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 16901 + HDFS_BYTES_READ: 16896 
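NOTE: the small HDFS_BYTES_READ / CACHE_HIT_BYTES / CACHE_MISS_BYTES deltas in these PPD golden files (above and below) are byte-level differences from re-encoded decimal streams in the regenerated ORC test data, not behavioral changes. The pattern across all of these golden files is driven by declared decimal precision: ORC's DECIMAL_64 representation packs the unscaled value into a signed 64-bit long, so it only applies to decimals of precision 18 or less. A bare `decimal` column in Hive resolves to decimal(10,0), which is DECIMAL_64-eligible; the test tables are redeclared as decimal(38,0), presumably to keep them on the classic HiveDecimalWritable path even while the ORC input format now advertises [DECIMAL_64] in the vectorization summary. A minimal sketch of the eligibility rule, assuming the org.apache.orc.TypeDescription API is on the classpath (the local MAX_DECIMAL64_PRECISION constant mirrors the one in TypeDescription):

import org.apache.orc.TypeDescription;

public class Decimal64EligibilitySketch {
  // DECIMAL_64 stores the unscaled value in a long; 18 digits is the most
  // that always fits, so precision is capped at 18.
  private static final int MAX_DECIMAL64_PRECISION = 18;

  static boolean canUseDecimal64(TypeDescription type) {
    return type.getCategory() == TypeDescription.Category.DECIMAL
        && type.getPrecision() <= MAX_DECIMAL64_PRECISION;
  }

  public static void main(String[] args) {
    // Hive's bare `decimal` resolves to decimal(10,0): eligible.
    TypeDescription d10 = TypeDescription.createDecimal().withPrecision(10).withScale(0);
    // The redeclared test columns use decimal(38,0): not eligible.
    TypeDescription d38 = TypeDescription.createDecimal().withPrecision(38).withScale(0);
    System.out.println(canUseDecimal64(d10)); // true  -> Decimal64ColumnVector path
    System.out.println(canUseDecimal64(d38)); // false -> DecimalColumnVector path
  }
}

Consistent with this, the plans below show [DECIMAL_64] both advertised (inputFormatFeatureSupport) and selected (featureSupportInUse) for the OrcInputFormat inputs, while the decimal(38,0) columns themselves keep the unbounded decimal representation.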
HDFS_BYTES_WRITTEN: 104 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1103,7 +1103,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 17731 + HDFS_BYTES_READ: 17726 HDFS_BYTES_WRITTEN: 104 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1135,7 +1135,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 16901 + HDFS_BYTES_READ: 16896 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1167,7 +1167,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 17731 + HDFS_BYTES_READ: 17726 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1199,7 +1199,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 16901 + HDFS_BYTES_READ: 16896 HDFS_BYTES_WRITTEN: 102 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1231,7 +1231,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 17731 + HDFS_BYTES_READ: 17726 HDFS_BYTES_WRITTEN: 102 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1263,7 +1263,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 4912 + HDFS_BYTES_READ: 4913 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 4 HDFS_LARGE_READ_OPS: 0 @@ -1286,7 +1286,7 @@ Stage-1 LLAP IO COUNTERS: ALLOCATED_BYTES: 524288 ALLOCATED_USED_BYTES: 8527 CACHE_HIT_BYTES: 24 - CACHE_MISS_BYTES: 4912 + CACHE_MISS_BYTES: 4913 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 3 NUM_VECTOR_BATCHES: 3 @@ -1325,7 +1325,7 @@ Stage-1 HIVE COUNTERS: Stage-1 LLAP IO COUNTERS: ALLOCATED_BYTES: 262144 ALLOCATED_USED_BYTES: 2376 - CACHE_HIT_BYTES: 4936 + CACHE_HIT_BYTES: 4937 CACHE_MISS_BYTES: 1751 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 @@ -1347,7 +1347,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 21461 + HDFS_BYTES_READ: 21457 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1379,7 +1379,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 23339 + HDFS_BYTES_READ: 23335 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1415,7 +1415,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 21461 + HDFS_BYTES_READ: 21457 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1447,7 +1447,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 23339 + HDFS_BYTES_READ: 23335 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1563,7 +1563,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 20632 + HDFS_BYTES_READ: 20627 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1595,7 +1595,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS 
PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 22367 + HDFS_BYTES_READ: 22362 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1711,7 +1711,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 20632 + HDFS_BYTES_READ: 20627 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1743,7 +1743,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 22367 + HDFS_BYTES_READ: 22362 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1859,7 +1859,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 2183 + HDFS_BYTES_READ: 2184 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 4 HDFS_LARGE_READ_OPS: 0 @@ -1882,7 +1882,7 @@ Stage-1 LLAP IO COUNTERS: ALLOCATED_BYTES: 786432 ALLOCATED_USED_BYTES: 4293 CACHE_HIT_BYTES: 24 - CACHE_MISS_BYTES: 2183 + CACHE_MISS_BYTES: 2184 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 3 NUM_VECTOR_BATCHES: 3 @@ -1899,7 +1899,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 18750 + HDFS_BYTES_READ: 18746 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1953,7 +1953,7 @@ Stage-1 HIVE COUNTERS: Stage-1 LLAP IO COUNTERS: ALLOCATED_BYTES: 262144 ALLOCATED_USED_BYTES: 2376 - CACHE_HIT_BYTES: 2207 + CACHE_HIT_BYTES: 2208 CACHE_MISS_BYTES: 1217 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 @@ -1971,7 +1971,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 20076 + HDFS_BYTES_READ: 20072 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 diff --git a/ql/src/test/results/clientpositive/llap/orc_split_elimination.q.out b/ql/src/test/results/clientpositive/llap/orc_split_elimination.q.out index b26a28d..53119a6 100644 --- a/ql/src/test/results/clientpositive/llap/orc_split_elimination.q.out +++ b/ql/src/test/results/clientpositive/llap/orc_split_elimination.q.out @@ -1,8 +1,8 @@ -PREHOOK: query: create table orc_split_elim (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +PREHOOK: query: create table orc_split_elim (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@orc_split_elim -POSTHOOK: query: create table orc_split_elim (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +POSTHOOK: query: create table orc_split_elim (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orc_split_elim @@ -158,11 +158,11 @@ POSTHOOK: Input: default@orc_split_elim 29 cat 8.0 3 1969-12-31 16:00:10 5 eat 0.8 6 1969-12-31 16:00:20 70 dog 1.8 4 1969-12-31 16:00:15 -PREHOOK: query: create table orc_split_elim_part (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) partitioned by (country string, year int) stored as orc +PREHOOK: query: create table orc_split_elim_part (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) 
partitioned by (country string, year int) stored as orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@orc_split_elim_part -POSTHOOK: query: create table orc_split_elim_part (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) partitioned by (country string, year int) stored as orc +POSTHOOK: query: create table orc_split_elim_part (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) partitioned by (country string, year int) stored as orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orc_split_elim_part diff --git a/ql/src/test/results/clientpositive/llap/orc_struct_type_vectorization.q.out b/ql/src/test/results/clientpositive/llap/orc_struct_type_vectorization.q.out index 4cd56f8..da69c5f 100644 --- a/ql/src/test/results/clientpositive/llap/orc_struct_type_vectorization.q.out +++ b/ql/src/test/results/clientpositive/llap/orc_struct_type_vectorization.q.out @@ -126,8 +126,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -267,8 +267,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_part_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_part_llap_io.q.out index d907fc8..f58338f 100644 --- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_part_llap_io.q.out +++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_part_llap_io.q.out @@ -93,8 +93,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -235,8 +235,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -431,8 +431,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -566,8 +566,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + 
inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -699,8 +699,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -823,8 +823,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -951,8 +951,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1070,8 +1070,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1191,8 +1191,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1303,8 +1303,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1438,8 +1438,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1564,8 +1564,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1676,8 +1676,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + 
featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1767,8 +1767,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table.q.out index 9319218..9dddc12 100644 --- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table.q.out +++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table.q.out @@ -92,8 +92,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -238,8 +238,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -446,8 +446,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -578,8 +578,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -707,8 +707,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -827,8 +827,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -951,8 +951,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1066,8 +1066,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1183,8 +1183,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1291,8 +1291,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1422,8 +1422,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1544,8 +1544,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1652,8 +1652,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1739,8 +1739,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table_llap_io.q.out index 6fe97fb..0f3c600 100644 --- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table_llap_io.q.out +++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table_llap_io.q.out @@ -93,8 +93,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat 
allNative: false usesVectorUDFAdaptor: false @@ -240,8 +240,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -449,8 +449,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -582,8 +582,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -712,8 +712,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -833,8 +833,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -958,8 +958,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1074,8 +1074,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1192,8 +1192,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1301,8 +1301,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1433,8 +1433,8
@@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1556,8 +1556,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1665,8 +1665,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1753,8 +1753,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part.q.out index 131d0fa..69167ef 100644 --- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part.q.out +++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part.q.out @@ -107,8 +107,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -266,8 +266,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -479,8 +479,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -631,8 +631,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -852,8 +852,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1063,8 +1063,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1260,8 +1260,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1485,8 +1485,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1652,8 +1652,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex.q.out index bff87f9..78b5231 100644 --- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex.q.out +++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex.q.out @@ -181,8 +181,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -485,8 +485,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -715,8 +715,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex_llap_io.q.out 
b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex_llap_io.q.out index 06ae325..c786684 100644 --- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex_llap_io.q.out +++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex_llap_io.q.out @@ -182,8 +182,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -487,8 +487,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -718,8 +718,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive.q.out index 966ab8f..c835afd 100644 --- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive.q.out +++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive.q.out @@ -292,7 +292,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] + inputFormatFeatureSupport: [DECIMAL_64] + vectorizationSupportRemovedReasons: [[DECIMAL_64] is disabled because it is not in hive.vectorized.input.format.supports.enabled []] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false @@ -543,7 +544,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] + inputFormatFeatureSupport: [DECIMAL_64] + vectorizationSupportRemovedReasons: [[DECIMAL_64] is disabled because it is not in hive.vectorized.input.format.supports.enabled []] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false @@ -716,7 +718,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] + inputFormatFeatureSupport: [DECIMAL_64] + vectorizationSupportRemovedReasons: [[DECIMAL_64] is disabled because it is not in hive.vectorized.input.format.supports.enabled []] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false @@ -873,7 +876,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] + inputFormatFeatureSupport: [DECIMAL_64] + vectorizationSupportRemovedReasons: [[DECIMAL_64] is disabled because it is not in hive.vectorized.input.format.supports.enabled []]
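
These hunks (and the matching ones for the llap_io variant below) document the negotiation's failure path: the input format advertises DECIMAL_64, but the test runs with hive.vectorized.input.format.supports.enabled set to the empty list, so the feature is struck from featureSupportInUse and the reason is recorded in the plan. A simplified model of that intersection — not the Vectorizer's actual code; only the conf key and the VectorizedSupport.Support enum are real names:

    import java.util.EnumSet;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport;

    public class SupportNegotiationSketch {
      public static void main(String[] args) {
        // These q-files run with an empty allow-list, as the removed-reason shows.
        Configuration conf = new Configuration(false);
        conf.set("hive.vectorized.input.format.supports.enabled", "");
        // What the input format advertises via getSupportedFeatures().
        EnumSet<VectorizedSupport.Support> advertised =
            EnumSet.of(VectorizedSupport.Support.DECIMAL_64);
        // What the allow-list permits.
        EnumSet<VectorizedSupport.Support> allowed =
            EnumSet.noneOf(VectorizedSupport.Support.class);
        for (String name : conf.get("hive.vectorized.input.format.supports.enabled").split(",")) {
          if (!name.trim().isEmpty()) {
            allowed.add(VectorizedSupport.Support.valueOf(name.trim().toUpperCase()));
          }
        }
        // featureSupportInUse is the intersection; anything dropped is a removed reason.
        EnumSet<VectorizedSupport.Support> inUse = EnumSet.copyOf(advertised);
        inUse.retainAll(allowed);
        System.out.println("featureSupportInUse: " + inUse); // prints []
      }
    }
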
featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false @@ -1111,7 +1115,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] + inputFormatFeatureSupport: [DECIMAL_64] + vectorizationSupportRemovedReasons: [[DECIMAL_64] is disabled because it is not in hive.vectorized.input.format.supports.enabled []] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive_llap_io.q.out index be57603..9c2460f 100644 --- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive_llap_io.q.out +++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive_llap_io.q.out @@ -293,7 +293,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] + inputFormatFeatureSupport: [DECIMAL_64] + vectorizationSupportRemovedReasons: [[DECIMAL_64] is disabled because it is not in hive.vectorized.input.format.supports.enabled []] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false @@ -545,7 +546,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] + inputFormatFeatureSupport: [DECIMAL_64] + vectorizationSupportRemovedReasons: [[DECIMAL_64] is disabled because it is not in hive.vectorized.input.format.supports.enabled []] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false @@ -719,7 +721,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] + inputFormatFeatureSupport: [DECIMAL_64] + vectorizationSupportRemovedReasons: [[DECIMAL_64] is disabled because it is not in hive.vectorized.input.format.supports.enabled []] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false @@ -877,7 +880,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] + inputFormatFeatureSupport: [DECIMAL_64] + vectorizationSupportRemovedReasons: [[DECIMAL_64] is disabled because it is not in hive.vectorized.input.format.supports.enabled []] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false @@ -1116,7 +1120,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] + inputFormatFeatureSupport: [DECIMAL_64] + vectorizationSupportRemovedReasons: [[DECIMAL_64] is disabled because it is not in hive.vectorized.input.format.supports.enabled []] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table.q.out index ef65472..6973081 100644 --- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table.q.out +++
b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table.q.out @@ -107,8 +107,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -271,8 +271,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -497,8 +497,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -794,8 +794,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1000,8 +1000,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table_llap_io.q.out index 5118f2d..51d72d7 100644 --- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table_llap_io.q.out +++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table_llap_io.q.out @@ -108,8 +108,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -273,8 +273,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -500,8 +500,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ 
-798,8 +798,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1005,8 +1005,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_complex_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_complex_llap_io.q.out index 5052fe6..ca2bfb7 100644 --- a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_complex_llap_io.q.out +++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_complex_llap_io.q.out @@ -92,8 +92,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -336,8 +335,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -497,8 +495,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -745,8 +742,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_primitive_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_primitive_llap_io.q.out index df136bf..8765301 100644 --- a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_primitive_llap_io.q.out +++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_primitive_llap_io.q.out @@ -182,8 +182,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + 
featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -513,8 +512,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -775,8 +773,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -992,8 +989,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1198,7 +1194,7 @@ STAGE PLANS: Statistics: Num rows: 5 Data size: 3595 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:insert_num:int, 1:c1:char(12), 2:c2:char(25), 3:c3:varchar(25), 4:c4:varchar(10), 5:c5:decimal(12,4), 6:c6:decimal(20,10), 7:b:string, 8:part:int, 9:ROW__ID:struct] + vectorizationSchemaColumns: [0:insert_num:int, 1:c1:char(12), 2:c2:char(25), 3:c3:varchar(25), 4:c4:varchar(10), 5:c5:decimal(12,4)/DECIMAL_64, 6:c6:decimal(20,10), 7:b:string, 8:part:int, 9:ROW__ID:struct] Select Operator expressions: insert_num (type: int), part (type: int), c1 (type: char(12)), c2 (type: char(25)), c3 (type: varchar(25)), c4 (type: varchar(10)), c5 (type: decimal(12,4)), c6 (type: decimal(20,10)), b (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 @@ -1223,8 +1219,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1232,7 +1227,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 8 includeColumns: [0, 1, 2, 3, 4, 5, 6, 7] - dataColumns: insert_num:int, c1:char(12), c2:char(25), c3:varchar(25), c4:varchar(10), c5:decimal(12,4), c6:decimal(20,10), b:string + dataColumns: insert_num:int, c1:char(12), c2:char(25), c3:varchar(25), c4:varchar(10), c5:decimal(12,4)/DECIMAL_64, c6:decimal(20,10), b:string partitionColumnCount: 1 partitionColumns: part:int scratchColumnTypeNames: [] diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_llap_io.q.out index 7259b33..36b53e5 100644 --- a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_llap_io.q.out +++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_llap_io.q.out @@ -88,8 +88,7 @@ STAGE PLANS: enabled: 
true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -200,8 +199,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -314,8 +312,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -439,8 +436,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -590,8 +586,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -736,8 +731,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -875,8 +869,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1028,8 +1021,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1158,8 +1150,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: 
[DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_table_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_table_llap_io.q.out index 8f83622..867e134 100644 --- a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_table_llap_io.q.out +++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_table_llap_io.q.out @@ -88,8 +88,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -194,8 +193,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -293,8 +291,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -400,8 +397,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -501,8 +497,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -608,8 +603,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -716,8 +710,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -879,8 +872,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - 
vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1005,8 +997,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1134,8 +1125,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1255,8 +1245,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1377,8 +1366,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1556,8 +1544,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1726,8 +1713,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_adaptor_usage_mode.q.out b/ql/src/test/results/clientpositive/llap/vector_adaptor_usage_mode.q.out index f411b01..fba880b 100644 --- a/ql/src/test/results/clientpositive/llap/vector_adaptor_usage_mode.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_adaptor_usage_mode.q.out @@ -490,8 +490,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -579,8 +579,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -1023,8 +1023,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1150,8 +1150,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_aggregate_9.q.out b/ql/src/test/results/clientpositive/llap/vector_aggregate_9.q.out index 92366c8..ffe3bfb 100644 --- a/ql/src/test/results/clientpositive/llap/vector_aggregate_9.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_aggregate_9.q.out @@ -166,8 +166,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -306,8 +306,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -446,8 +446,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_aggregate_without_gby.q.out b/ql/src/test/results/clientpositive/llap/vector_aggregate_without_gby.q.out index c99ac8d..5c35139 100644 --- a/ql/src/test/results/clientpositive/llap/vector_aggregate_without_gby.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_aggregate_without_gby.q.out @@ -105,8 +105,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_annotate_stats_select.q.out b/ql/src/test/results/clientpositive/llap/vector_annotate_stats_select.q.out index 82ac85c..cb463a1 100644 --- 
a/ql/src/test/results/clientpositive/llap/vector_annotate_stats_select.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_annotate_stats_select.q.out @@ -413,8 +413,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -474,8 +474,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -535,8 +535,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -596,8 +596,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -657,8 +657,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -718,8 +718,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -779,8 +779,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -888,8 +888,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -949,8 +949,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1010,8 +1010,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1071,8 +1071,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1132,8 +1132,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -1193,8 +1193,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -1254,8 +1254,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -1315,8 +1315,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -1412,8 +1412,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1478,8 +1478,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1544,8 +1544,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat 
allNative: false usesVectorUDFAdaptor: false @@ -1612,8 +1612,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1719,8 +1719,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1821,8 +1821,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1928,8 +1928,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2029,8 +2029,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2096,8 +2096,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_auto_smb_mapjoin_14.q.out b/ql/src/test/results/clientpositive/llap/vector_auto_smb_mapjoin_14.q.out index 559e28b..6238281 100644 --- a/ql/src/test/results/clientpositive/llap/vector_auto_smb_mapjoin_14.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_auto_smb_mapjoin_14.q.out @@ -1267,8 +1267,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1310,8 +1310,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git 
a/ql/src/test/results/clientpositive/llap/vector_between_columns.q.out b/ql/src/test/results/clientpositive/llap/vector_between_columns.q.out index c85c59e..1824976 100644 --- a/ql/src/test/results/clientpositive/llap/vector_between_columns.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_between_columns.q.out @@ -134,8 +134,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -168,8 +168,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -303,8 +303,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -337,8 +337,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_between_in.q.out b/ql/src/test/results/clientpositive/llap/vector_between_in.q.out index b1c0bab..7355ed8 100644 --- a/ql/src/test/results/clientpositive/llap/vector_between_in.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_between_in.q.out @@ -79,8 +79,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -184,8 +184,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -283,8 +283,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -388,8 +388,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + 
inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -487,8 +487,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -582,8 +582,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -677,8 +677,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -782,8 +782,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1137,8 +1137,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1275,8 +1275,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1413,8 +1413,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -1551,8 +1551,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true diff --git a/ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out b/ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out index 7d14542..e47c118 100644 --- 
a/ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out @@ -152,6 +152,7 @@ STAGE PLANS: 0 _col10 (type: binary) 1 _col10 (type: binary) Map Join Vectorization: + bigTableValueExpressions: ConvertDecimal64ToDecimal(col 9:decimal(4,2)/DECIMAL_64) -> 12:decimal(4,2) className: VectorMapJoinInnerStringOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true @@ -165,13 +166,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [22] - selectExpressions: VectorUDFAdaptor(hash(_col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7,_col8,_col9,_col10,_col11,_col12,_col13,_col14,_col15,_col16,_col17,_col18,_col19,_col20,_col21)) -> 22:int + projectedOutputColumnNums: [23] + selectExpressions: VectorUDFAdaptor(hash(_col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7,_col8,_col9,_col10,_col11,_col12,_col13,_col14,_col15,_col16,_col17,_col18,_col19,_col20,_col21)) -> 23:int Statistics: Num rows: 10000 Data size: 6819968 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: sum(_col0) Group By Vectorization: - aggregators: VectorUDAFSumLong(col 22:int) -> bigint + aggregators: VectorUDAFSumLong(col 23:int) -> bigint className: VectorGroupByOperator groupByMode: HASH native: false @@ -193,8 +194,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -236,8 +237,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -391,8 +392,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -594,8 +595,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -637,8 +638,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: 
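
The bigTableValueExpressions entry above — ConvertDecimal64ToDecimal(col 9:decimal(4,2)/DECIMAL_64) -> 12:decimal(4,2) — shows the planner bridging the two representations where an operator still needs full HiveDecimal semantics; allocating scratch column 12 is also consistent with the hash projection shifting from column 22 to 23 in the same hunk. At bottom the cast just reinterprets the scaled long under the column's scale; a sketch, assuming HiveDecimalWritable.setFromLongAndScale from storage-api:

    import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

    public class ConvertDecimal64Sketch {
      public static void main(String[] args) {
        // 4.25 as decimal(4,2)/DECIMAL_64 is the scaled long 425.
        long decimal64 = 425L;
        HiveDecimalWritable w = new HiveDecimalWritable();
        w.setFromLongAndScale(decimal64, 2); // widen to a full decimal(4,2)
        System.out.println(w.getHiveDecimal()); // 4.25
      }
    }
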
false diff --git a/ql/src/test/results/clientpositive/llap/vector_bround.q.out b/ql/src/test/results/clientpositive/llap/vector_bround.q.out index 59996d6..02770a2 100644 --- a/ql/src/test/results/clientpositive/llap/vector_bround.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_bround.q.out @@ -95,8 +95,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_case_when_1.q.out b/ql/src/test/results/clientpositive/llap/vector_case_when_1.q.out index 2581311..ab083e7 100644 --- a/ql/src/test/results/clientpositive/llap/vector_case_when_1.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_case_when_1.q.out @@ -140,7 +140,6 @@ SELECT IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11, IF(L_SUPPKEY % 500 > 100, DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12 FROM lineitem_test -ORDER BY Quantity PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL SELECT @@ -182,7 +181,6 @@ SELECT IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11, IF(L_SUPPKEY % 500 > 100, DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12 FROM lineitem_test -ORDER BY Quantity POSTHOOK: type: QUERY Explain PLAN VECTORIZATION: @@ -197,9 +195,6 @@ STAGE PLANS: Stage: Stage-1 Tez #### A masked pattern was here #### - Edges: - Reducer 2 <- Map 1 (SIMPLE_EDGE) -#### A masked pattern was here #### Vertices: Map 1 Map Operator Tree: @@ -210,11 +205,13 @@ STAGE PLANS: expressions: l_quantity (type: int), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE ('Huge number') END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE (null) END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN (null) ELSE (null) END (type: string), if((l_shipmode = 'SHIP '), date_add(l_shipdate, 10), date_add(l_shipdate, 5)) (type: date), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0) END (type: double), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0.0D) END (type: double), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), null, l_tax) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, null) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(12,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(12,2)), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(10,2)), if((l_partkey > 30), CAST( l_receiptdate AS TIMESTAMP), CAST( l_commitdate AS TIMESTAMP)) (type: timestamp), if((l_suppkey > 10000), datediff(l_receiptdate, l_commitdate), null) (type: int), if((l_suppkey > 10000), null, 
datediff(l_receiptdate, l_commitdate)) (type: int), if(((l_suppkey % 500) > 100), DATE'2009-01-01', DATE'2009-12-31') (type: date) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16 Statistics: Num rows: 101 Data size: 57327 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + + File Output Operator + compressed: false Statistics: Num rows: 101 Data size: 57327 Basic stats: COMPLETE Column stats: NONE - value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: date), _col5 (type: double), _col6 (type: double), _col7 (type: decimal(10,2)), _col8 (type: decimal(10,2)), _col9 (type: decimal(12,2)), _col10 (type: decimal(12,2)), _col11 (type: decimal(10,2)), _col12 (type: decimal(10,2)), _col13 (type: timestamp), _col14 (type: int), _col15 (type: int), _col16 (type: date) + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: llap LLAP IO: all inputs Map Vectorization: @@ -223,40 +220,6 @@ STAGE PLANS: inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat notVectorizedReason: SELECT operator: Unexpected hive type name void vectorized: false - Reducer 2 - Execution mode: vectorized, llap - Reduce Vectorization: - enabled: true - enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + - allNative: false - usesVectorUDFAdaptor: false - vectorized: true - rowBatchContext: - dataColumnCount: 17 - dataColumns: KEY.reducesinkkey0:int, VALUE._col0:string, VALUE._col1:string, VALUE._col2:string, VALUE._col3:date, VALUE._col4:double, VALUE._col5:double, VALUE._col6:decimal(10,2), VALUE._col7:decimal(10,2), VALUE._col8:decimal(12,2), VALUE._col9:decimal(12,2), VALUE._col10:decimal(10,2), VALUE._col11:decimal(10,2), VALUE._col12:timestamp, VALUE._col13:int, VALUE._col14:int, VALUE._col15:date - partitionColumnCount: 0 - scratchColumnTypeNames: [] - Reduce Operator Tree: - Select Operator - expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: date), VALUE._col4 (type: double), VALUE._col5 (type: double), VALUE._col6 (type: decimal(10,2)), VALUE._col7 (type: decimal(10,2)), VALUE._col8 (type: decimal(12,2)), VALUE._col9 (type: decimal(12,2)), VALUE._col10 (type: decimal(10,2)), VALUE._col11 (type: decimal(10,2)), VALUE._col12 (type: timestamp), VALUE._col13 (type: int), VALUE._col14 (type: int), VALUE._col15 (type: date) - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16 - Select Vectorization: - className: VectorSelectOperator - native: true - projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] - Statistics: Num rows: 101 Data size: 57327 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - File Sink Vectorization: - className: VectorFileSinkOperator - native: false - Statistics: Num rows: 101 Data size: 57327 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-0 Fetch Operator @@ -303,7 +266,6 @@ PREHOOK: query: SELECT IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11, IF(L_SUPPKEY % 500 > 100, DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12 FROM lineitem_test -ORDER BY Quantity PREHOOK: type: QUERY PREHOOK: Input: default@lineitem_test #### A masked pattern was here #### @@ -346,112 +308,111 @@ POSTHOOK: query: SELECT IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11, IF(L_SUPPKEY % 500 > 100, DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12 FROM lineitem_test -ORDER BY Quantity POSTHOOK: type: QUERY POSTHOOK: Input: default@lineitem_test #### A masked pattern was here #### quantity quantity_description quantity_description_2 quantity_description_3 expected_date field_1 field_2 field_3 field_4 field_5 field_6 field_7 field_8 field_9 field_10 field_11 field_12 -NULL Huge number NULL NULL NULL 0.0 0.0 NULL NULL NULL 0.00 NULL 0.00 NULL NULL NULL 2009-12-31 -1 Single Single Single 1994-12-06 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-12-15 00:00:00 NULL 3 2009-01-01 1 Single Single Single 1994-01-31 0.0 0.0 0.05 0.05 0.05 0.05 0.05 0.05 1994-01-28 00:00:00 NULL -36 2009-01-01 -2 Two Two Two 1993-12-09 0.0 0.0 0.06 NULL 0.06 0.00 0.06 0.00 1994-01-01 00:00:00 NULL -6 2009-01-01 -2 Two Two Two 1995-08-12 2011.3912000000003 2011.3912000000003 NULL NULL 0.00 0.00 0.00 0.00 1995-08-23 00:00:00 NULL -45 2009-01-01 -3 Some Some Some 1998-07-09 2778.921 2778.921 0.02 NULL 0.02 0.00 0.02 0.00 1998-07-21 00:00:00 NULL 46 2009-12-31 -3 Some Some Some 1998-06-02 5137.6143 5137.6143 0.07 NULL 0.07 0.00 0.07 0.00 1998-06-02 00:00:00 NULL 60 2009-01-01 -3 Some Some Some 1994-06-11 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1994-06-15 00:00:00 NULL -42 2009-12-31 -4 Some Some Some 1995-08-09 5990.4936 5990.4936 0.03 NULL 0.03 0.00 0.03 0.00 1995-09-03 00:00:00 NULL -28 2009-01-01 -4 Some Some Some 1997-04-27 5669.7732000000005 5669.7732000000005 0.04 NULL 0.04 0.00 0.04 0.00 1997-04-20 00:00:00 NULL 79 2009-01-01 -5 Some Some Some 1996-02-15 6217.103999999999 6217.103999999999 0.02 NULL 0.02 0.00 0.02 0.00 1996-02-13 00:00:00 NULL -42 2009-01-01 -5 Some Some Some 1997-02-25 8116.96 8116.96 NULL NULL 0.00 0.00 0.00 0.00 1997-02-21 00:00:00 NULL 9 2009-01-01 -5 Some Some Some 1993-12-14 0.0 0.0 0.03 0.03 0.03 0.03 0.03 0.03 1993-12-23 00:00:00 NULL -2 2009-01-01 -6 Some Some Some 1998-11-04 9487.6152 9487.6152 0.06 NULL 0.06 0.00 0.06 0.00 1998-11-05 00:00:00 NULL 46 2009-12-31 -6 Some Some Some 1995-07-26 8793.2736 8793.2736 0.03 NULL 0.03 0.00 0.03 0.00 1995-07-25 00:00:00 NULL -60 2009-01-01 -7 Some Some Some 1996-01-24 12613.136199999999 12613.136199999999 0.04 NULL 0.04 0.00 0.04 0.00 1996-01-29 00:00:00 NULL 38 2009-01-01 -8 Some Some Some 1996-02-03 11978.640000000001 11978.640000000001 0.02 0.02 0.02 0.02 0.02 0.02 1996-01-31 00:00:00 NULL -34 2009-01-01 -8 Some Some Some 1994-01-17 0.0 0.0 0.08 0.08 0.08 0.08 0.08 0.08 1994-01-14 00:00:00 NULL -44 2009-01-01 -9 Some Some Some 1996-02-11 10666.6272 10666.6272 0.08 0.08 0.08 0.08 0.08 0.08 1996-02-19 00:00:00 NULL -12 2009-01-01 +1 Single Single Single 1994-12-06 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-12-15 00:00:00 NULL 3 2009-01-01 11 Many Many NULL 1994-03-22 0.0 0.0 0.05 NULL 0.05 0.00 0.05 0.00 1994-03-27 00:00:00 NULL 10 2009-01-01 12 Many Many NULL 1996-05-12 12655.998 12655.998 
0.03 0.03 0.03 0.03 0.03 0.03 1996-06-03 00:00:00 NULL 82 2009-01-01 12 Many Many NULL 1997-02-01 12156.034800000001 12156.034800000001 0.05 NULL 0.05 0.00 0.05 0.00 1997-02-22 00:00:00 NULL 1 2009-01-01 -13 Many Many NULL 1998-10-28 17554.68 17554.68 0.07 NULL 0.07 0.00 0.07 0.00 1998-11-06 00:00:00 NULL 53 2009-01-01 13 Many Many NULL 1993-04-06 0.0 0.0 0.02 NULL 0.02 0.00 0.02 0.00 1993-04-08 00:00:00 NULL 4 2009-01-01 13 Many Many NULL 1994-03-08 0.0 0.0 0.06 NULL 0.06 0.00 0.06 0.00 1994-03-26 00:00:00 NULL 41 2009-01-01 +13 Many Many NULL 1998-10-28 17554.68 17554.68 0.07 NULL 0.07 0.00 0.07 0.00 1998-11-06 00:00:00 NULL 53 2009-01-01 14 Many Many NULL 1995-01-04 0.0 0.0 0.02 NULL 0.02 0.00 0.02 0.00 1995-01-27 00:00:00 NULL 66 2009-01-01 15 Many Many NULL 1994-11-05 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1994-11-20 00:00:00 NULL 81 2009-12-31 -17 Many Many NULL 1996-03-18 20321.500799999998 20321.500799999998 NULL NULL 0.00 0.00 0.00 0.00 1996-03-22 00:00:00 NULL 39 2009-01-01 17 Many Many NULL 1994-07-07 0.0 0.0 0.00 0.00 0.00 0.00 0.00 0.00 1994-07-03 00:00:00 NULL -4 2009-01-01 -19 Many Many NULL 1994-02-05 0.0 0.0 0.03 0.03 0.03 0.03 0.03 0.03 1994-02-06 00:00:00 NULL -11 2009-01-01 +17 Many Many NULL 1996-03-18 20321.500799999998 20321.500799999998 NULL NULL 0.00 0.00 0.00 0.00 1996-03-22 00:00:00 NULL 39 2009-01-01 19 Many Many NULL 1993-05-19 0.0 0.0 0.08 0.08 0.08 0.08 0.08 0.08 1993-05-25 00:00:00 NULL 81 2009-01-01 +19 Many Many NULL 1994-02-05 0.0 0.0 0.03 0.03 0.03 0.03 0.03 0.03 1994-02-06 00:00:00 NULL -11 2009-01-01 +2 Two Two Two 1993-12-09 0.0 0.0 0.06 NULL 0.06 0.00 0.06 0.00 1994-01-01 00:00:00 NULL -6 2009-01-01 +2 Two Two Two 1995-08-12 2011.3912000000003 2011.3912000000003 NULL NULL 0.00 0.00 0.00 0.00 1995-08-23 00:00:00 NULL -45 2009-01-01 20 Many Many NULL 1998-07-02 32042.592 32042.592 0.01 NULL 0.01 0.00 0.01 0.00 1998-07-02 00:00:00 NULL 40 2009-01-01 -21 Many Many NULL 1995-07-11 24640.0518 24640.0518 NULL NULL 0.00 0.00 0.00 0.00 1995-07-31 00:00:00 NULL 78 2009-01-01 21 Many Many NULL 1994-10-05 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-10-26 00:00:00 NULL 38 2009-01-01 -22 Many Many NULL 1998-10-14 28405.0184 28405.0184 0.06 NULL 0.06 0.00 0.06 0.00 1998-10-12 00:00:00 NULL -4 2009-01-01 +21 Many Many NULL 1995-07-11 24640.0518 24640.0518 NULL NULL 0.00 0.00 0.00 0.00 1995-07-31 00:00:00 NULL 78 2009-01-01 22 Many Many NULL 1995-07-22 39353.82 39353.82 0.05 NULL 0.05 0.00 0.05 0.00 1995-07-19 00:00:00 NULL 45 2009-01-01 -23 Many Many NULL 1997-04-24 33946.3785 33946.3785 NULL NULL 0.00 0.00 0.00 0.00 1997-05-06 00:00:00 NULL 81 2009-01-01 +22 Many Many NULL 1998-10-14 28405.0184 28405.0184 0.06 NULL 0.06 0.00 0.06 0.00 1998-10-12 00:00:00 NULL -4 2009-01-01 23 Many Many NULL 1994-07-24 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-07-25 00:00:00 NULL 26 2009-01-01 23 Many Many NULL 1994-10-13 0.0 0.0 0.00 NULL 0.00 0.00 0.00 0.00 1994-10-24 00:00:00 NULL 79 2009-12-31 +23 Many Many NULL 1997-04-24 33946.3785 33946.3785 NULL NULL 0.00 0.00 0.00 0.00 1997-05-06 00:00:00 NULL 81 2009-01-01 24 Many Many NULL 1996-02-26 31762.584 31762.584 0.00 0.00 0.00 0.00 0.00 0.00 1996-03-18 00:00:00 NULL 75 2009-01-01 24 Many Many NULL 1996-04-04 20542.032 20542.032 0.04 NULL 0.04 0.00 0.04 0.00 1996-04-01 00:00:00 NULL 18 2009-12-31 25 Many Many NULL 1995-12-06 27263.995 27263.995 NULL NULL 0.00 0.00 0.00 0.00 1995-12-21 00:00:00 NULL -4 2009-01-01 25 Many Many NULL 1998-04-15 43064.1575 43064.1575 0.07 NULL 0.07 0.00 0.07 0.00 1998-04-11 00:00:00 NULL -11 2009-01-01 
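[Review note] The large row reshuffles in this q.out (here and again in the second result block further down) are expected fallout of dropping ORDER BY Quantity from the query: the plan loses its Reducer 2 / SIMPLE_EDGE shuffle and writes straight from the map vertex through a File Output Operator, so output order is undefined and the golden file falls back on sorting whole result rows as strings for stability. That is why "11" now sorts before "2" and the NULL row moves to the end. A minimal sketch of that kind of stabilization, with a hypothetical stabilize() helper standing in for whatever the QTest driver actually does:

    // Hypothetical sketch: stabilizing unordered query results for a golden file.
    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    public class SortQueryResults {
        // Sorts raw result lines lexicographically so a query without
        // ORDER BY still compares deterministically against the .q.out.
        static List<String> stabilize(List<String> rows) {
            List<String> sorted = new ArrayList<>(rows);
            Collections.sort(sorted); // plain String order: "11" < "2", "NULL" after digits
            return sorted;
        }

        public static void main(String[] args) {
            List<String> rows = Arrays.asList("2 Two", "11 Many", "NULL Huge number", "1 Single");
            System.out.println(stabilize(rows)); // [1 Single, 11 Many, 2 Two, NULL Huge number]
        }
    }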
-26 Many Many NULL 1996-11-09 39912.433600000004 39912.433600000004 0.04 NULL 0.04 0.00 0.04 0.00 1996-11-20 00:00:00 NULL 31 2009-01-01 -26 Many Many NULL 1995-04-25 0.0 0.0 0.03 NULL 0.03 0.00 0.03 0.00 1995-05-13 00:00:00 NULL 18 2009-01-01 26 Many Many NULL 1993-11-03 0.0 0.0 0.02 0.02 0.02 0.02 0.02 0.02 1993-11-04 00:00:00 NULL -44 2009-01-01 26 Many Many NULL 1994-10-21 0.0 0.0 0.08 NULL 0.08 0.00 0.08 0.00 1994-10-19 00:00:00 NULL 24 2009-01-01 -27 Many Many NULL 1998-06-29 45590.2425 45590.2425 NULL NULL 0.00 0.00 0.00 0.00 1998-06-29 00:00:00 NULL 4 2009-01-01 +26 Many Many NULL 1995-04-25 0.0 0.0 0.03 NULL 0.03 0.00 0.03 0.00 1995-05-13 00:00:00 NULL 18 2009-01-01 +26 Many Many NULL 1996-11-09 39912.433600000004 39912.433600000004 0.04 NULL 0.04 0.00 0.04 0.00 1996-11-20 00:00:00 NULL 31 2009-01-01 27 Many Many NULL 1994-01-26 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-01-23 00:00:00 NULL 62 2009-01-01 +27 Many Many NULL 1998-06-29 45590.2425 45590.2425 NULL NULL 0.00 0.00 0.00 0.00 1998-06-29 00:00:00 NULL 4 2009-01-01 28 Many Many NULL 1993-12-19 0.0 0.0 0.00 0.00 0.00 0.00 0.00 0.00 1994-01-01 00:00:00 NULL -9 2009-01-01 -28 Many Many NULL 1996-04-26 26349.6324 26349.6324 0.06 NULL 0.06 0.00 0.06 0.00 1996-05-16 00:00:00 NULL 47 2009-01-01 28 Many Many NULL 1994-12-29 0.0 0.0 0.07 NULL 0.07 0.00 0.07 0.00 1995-01-16 00:00:00 NULL 83 2009-01-01 28 Many Many NULL 1995-10-28 44866.219999999994 44866.219999999994 0.08 0.08 0.08 0.08 0.08 0.08 1995-10-26 00:00:00 NULL 60 2009-01-01 28 Many Many NULL 1996-02-06 45975.3616 45975.3616 0.02 NULL 0.02 0.00 0.02 0.00 1996-02-28 00:00:00 NULL 66 2009-01-01 28 Many Many NULL 1996-03-26 30855.6612 30855.6612 0.04 NULL 0.04 0.00 0.04 0.00 1996-04-20 00:00:00 NULL 12 2009-12-31 +28 Many Many NULL 1996-04-26 26349.6324 26349.6324 0.06 NULL 0.06 0.00 0.06 0.00 1996-05-16 00:00:00 NULL 47 2009-01-01 29 Many Many NULL 1997-01-30 39341.806 39341.806 NULL NULL 0.00 0.00 0.00 0.00 1997-01-27 00:00:00 NULL 0 2009-01-01 -30 Many Many NULL 1998-08-16 44561.46 44561.46 0.06 NULL 0.06 0.00 0.06 0.00 1998-08-14 00:00:00 NULL 34 2009-12-31 -30 Many Many NULL 1996-01-15 29770.173 29770.173 NULL NULL 0.00 0.00 0.00 0.00 1996-01-18 00:00:00 NULL 35 2009-12-31 +3 Some Some Some 1994-06-11 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1994-06-15 00:00:00 NULL -42 2009-12-31 +3 Some Some Some 1998-06-02 5137.6143 5137.6143 0.07 NULL 0.07 0.00 0.07 0.00 1998-06-02 00:00:00 NULL 60 2009-01-01 +3 Some Some Some 1998-07-09 2778.921 2778.921 0.02 NULL 0.02 0.00 0.02 0.00 1998-07-21 00:00:00 NULL 46 2009-12-31 30 Many Many NULL 1994-06-08 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-06-22 00:00:00 NULL 24 2009-01-01 +30 Many Many NULL 1996-01-15 29770.173 29770.173 NULL NULL 0.00 0.00 0.00 0.00 1996-01-18 00:00:00 NULL 35 2009-12-31 +30 Many Many NULL 1998-08-16 44561.46 44561.46 0.06 NULL 0.06 0.00 0.06 0.00 1998-08-14 00:00:00 NULL 34 2009-12-31 31 Many Many NULL 1993-11-03 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1993-11-08 00:00:00 NULL -41 2009-01-01 31 Many Many NULL 1994-02-24 0.0 0.0 0.08 0.08 0.08 0.08 0.08 0.08 1994-02-20 00:00:00 NULL -19 2009-01-01 -32 Many Many NULL 1995-08-19 63313.3312 63313.3312 0.00 NULL 0.00 0.00 0.00 0.00 1995-08-27 00:00:00 NULL -41 2009-01-01 32 Many Many NULL 1993-12-14 0.0 0.0 0.05 NULL 0.05 0.00 0.05 0.00 1993-12-28 00:00:00 NULL -7 2009-12-31 -32 Many Many NULL 1996-10-07 44955.15839999999 44955.15839999999 0.05 NULL 0.05 0.00 0.05 0.00 1996-10-14 00:00:00 NULL -66 2009-12-31 32 Many Many NULL 1994-08-29 0.0 0.0 0.06 NULL 0.06 0.00 0.06 
0.00 1994-08-31 00:00:00 NULL 14 2009-01-01 +32 Many Many NULL 1995-08-19 63313.3312 63313.3312 0.00 NULL 0.00 0.00 0.00 0.00 1995-08-27 00:00:00 NULL -41 2009-01-01 32 Many Many NULL 1996-02-04 46146.7488 46146.7488 NULL NULL 0.00 0.00 0.00 0.00 1996-02-03 00:00:00 NULL -4 2009-01-01 +32 Many Many NULL 1996-10-07 44955.15839999999 44955.15839999999 0.05 NULL 0.05 0.00 0.05 0.00 1996-10-14 00:00:00 NULL -66 2009-12-31 33 Many Many NULL 1998-04-17 54174.12 54174.12 0.01 NULL 0.01 0.00 0.01 0.00 1998-04-15 00:00:00 NULL 26 2009-01-01 -34 Many Many NULL 1996-01-27 63982.002400000005 63982.002400000005 NULL NULL 0.00 0.00 0.00 0.00 1996-01-27 00:00:00 NULL 21 2009-01-01 34 Many Many NULL 1995-11-13 60586.5448 60586.5448 0.06 NULL 0.06 0.00 0.06 0.00 1995-11-26 00:00:00 NULL -50 2009-01-01 +34 Many Many NULL 1996-01-27 63982.002400000005 63982.002400000005 NULL NULL 0.00 0.00 0.00 0.00 1996-01-27 00:00:00 NULL 21 2009-01-01 34 Many Many NULL 1998-03-10 56487.763199999994 56487.763199999994 NULL NULL 0.00 0.00 0.00 0.00 1998-03-30 00:00:00 NULL -23 2009-01-01 35 Many Many NULL 1996-01-21 40475.225 40475.225 0.03 0.03 0.03 0.03 0.03 0.03 1996-01-22 00:00:00 NULL -32 2009-01-01 36 Many Many NULL 1996-04-17 41844.6756 41844.6756 0.06 0.06 0.06 0.06 0.06 0.06 1996-04-20 00:00:00 NULL 52 2009-01-01 -37 Many Many NULL 1993-04-23 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1993-04-14 00:00:00 NULL 15 2009-12-31 37 Many Many NULL 1992-05-02 0.0 0.0 0.03 0.03 0.03 0.03 0.03 0.03 1992-05-02 00:00:00 NULL -13 2009-01-01 +37 Many Many NULL 1993-04-23 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1993-04-14 00:00:00 NULL 15 2009-12-31 37 Many Many NULL 1994-02-18 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1994-02-21 00:00:00 NULL -23 2009-01-01 -38 Many Many NULL 1997-02-02 44694.46 44694.46 0.05 0.05 0.05 0.05 0.05 0.05 1997-02-02 00:00:00 NULL 19 2009-01-01 38 Many Many NULL 1996-02-16 68028.3144 68028.3144 NULL NULL 0.00 0.00 0.00 0.00 1996-02-18 00:00:00 NULL -6 2009-01-01 +38 Many Many NULL 1997-02-02 44694.46 44694.46 0.05 0.05 0.05 0.05 0.05 0.05 1997-02-02 00:00:00 NULL 19 2009-01-01 39 Many Many NULL 1992-07-07 0.0 0.0 0.02 0.02 0.02 0.02 0.02 0.02 1992-07-28 00:00:00 NULL -21 2009-01-01 39 Many Many NULL 1998-02-03 45146.01 45146.01 NULL NULL 0.00 0.00 0.00 0.00 1998-02-18 00:00:00 NULL -48 2009-01-01 +4 Some Some Some 1995-08-09 5990.4936 5990.4936 0.03 NULL 0.03 0.00 0.03 0.00 1995-09-03 00:00:00 NULL -28 2009-01-01 +4 Some Some Some 1997-04-27 5669.7732000000005 5669.7732000000005 0.04 NULL 0.04 0.00 0.04 0.00 1997-04-20 00:00:00 NULL 79 2009-01-01 40 Many Many NULL 1992-07-26 0.0 0.0 0.03 NULL 0.03 0.00 0.03 0.00 1992-08-15 00:00:00 NULL 14 2009-01-01 40 Many Many NULL 1996-12-13 51224.736 51224.736 0.05 NULL 0.05 0.00 0.05 0.00 1997-01-01 00:00:00 NULL 71 2009-01-01 +41 Many Many NULL 1993-11-14 0.0 0.0 0.00 0.00 0.00 0.00 0.00 0.00 1993-11-11 00:00:00 NULL -74 2009-01-01 41 Many Many NULL 1994-02-26 0.0 0.0 0.07 NULL 0.07 0.00 0.07 0.00 1994-03-18 00:00:00 NULL 17 2009-01-01 41 Many Many NULL 1998-07-04 47989.6144 47989.6144 0.08 NULL 0.08 0.00 0.08 0.00 1998-07-06 00:00:00 NULL 9 2009-01-01 -41 Many Many NULL 1993-11-14 0.0 0.0 0.00 0.00 0.00 0.00 0.00 0.00 1993-11-11 00:00:00 NULL -74 2009-01-01 42 Many Many NULL 1994-08-05 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-08-28 00:00:00 NULL 33 2009-12-31 42 Many Many NULL 1996-02-13 68289.9672 68289.9672 0.00 NULL 0.00 0.00 0.00 0.00 1996-02-23 00:00:00 NULL 33 2009-01-01 -43 Many Many NULL 1996-10-22 62727.3207 62727.3207 0.01 NULL 0.01 0.00 0.01 0.00 1996-10-26 
00:00:00 NULL -19 2009-12-31 43 Many Many NULL 1992-07-15 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1992-08-02 00:00:00 NULL 27 2009-01-01 -44 Many Many NULL 1997-03-23 60781.124800000005 60781.124800000005 NULL NULL 0.00 0.00 0.00 0.00 1997-04-13 00:00:00 NULL 74 2009-12-31 +43 Many Many NULL 1996-10-22 62727.3207 62727.3207 0.01 NULL 0.01 0.00 0.01 0.00 1996-10-26 00:00:00 NULL -19 2009-12-31 44 Many Many NULL 1995-09-02 75106.658 75106.658 NULL NULL 0.00 0.00 0.00 0.00 1995-09-14 00:00:00 NULL 25 2009-01-01 44 Many Many NULL 1996-10-04 80882.4192 80882.4192 0.02 NULL 0.02 0.00 0.02 0.00 1996-09-30 00:00:00 NULL -48 2009-01-01 44 Many Many NULL 1996-11-19 48941.692800000004 48941.692800000004 0.06 NULL 0.06 0.00 0.06 0.00 1996-12-12 00:00:00 NULL -3 2009-01-01 +44 Many Many NULL 1997-03-23 60781.124800000005 60781.124800000005 NULL NULL 0.00 0.00 0.00 0.00 1997-04-13 00:00:00 NULL 74 2009-12-31 45 Many Many NULL 1994-02-07 0.0 0.0 0.00 NULL 0.00 0.00 0.00 0.00 1994-02-23 00:00:00 NULL 50 2009-01-01 45 Many Many NULL 1998-03-05 61489.35 61489.35 NULL NULL 0.00 0.00 0.00 0.00 1998-03-24 00:00:00 NULL 4 2009-01-01 46 Many Many NULL 1996-01-20 73475.892 73475.892 0.07 NULL 0.07 0.00 0.07 0.00 1996-02-03 00:00:00 NULL -53 2009-01-01 46 Many Many NULL 1996-10-01 77781.4092 77781.4092 NULL NULL 0.00 0.00 0.00 0.00 1996-10-26 00:00:00 NULL -54 2009-01-01 -46 Many Many NULL 1998-08-18 84565.5168 84565.5168 0.05 NULL 0.05 0.00 0.05 0.00 1998-08-29 00:00:00 NULL 52 2009-01-01 46 Many Many NULL 1998-07-01 56583.5144 56583.5144 0.05 NULL 0.05 0.00 0.05 0.00 1998-07-05 00:00:00 NULL 28 2009-01-01 +46 Many Many NULL 1998-08-18 84565.5168 84565.5168 0.05 NULL 0.05 0.00 0.05 0.00 1998-08-29 00:00:00 NULL 52 2009-01-01 48 Many Many NULL 1994-08-22 0.0 0.0 0.07 NULL 0.07 0.00 0.07 0.00 1994-09-08 00:00:00 NULL 28 2009-01-01 49 Many Many NULL 1993-11-14 0.0 0.0 0.00 0.00 0.00 0.00 0.00 0.00 1993-11-24 00:00:00 NULL -26 2009-12-31 +5 Some Some Some 1993-12-14 0.0 0.0 0.03 0.03 0.03 0.03 0.03 0.03 1993-12-23 00:00:00 NULL -2 2009-01-01 +5 Some Some Some 1996-02-15 6217.103999999999 6217.103999999999 0.02 NULL 0.02 0.00 0.02 0.00 1996-02-13 00:00:00 NULL -42 2009-01-01 +5 Some Some Some 1997-02-25 8116.96 8116.96 NULL NULL 0.00 0.00 0.00 0.00 1997-02-21 00:00:00 NULL 9 2009-01-01 50 Many Many NULL 1994-08-13 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-08-26 00:00:00 NULL -48 2009-12-31 +6 Some Some Some 1995-07-26 8793.2736 8793.2736 0.03 NULL 0.03 0.00 0.03 0.00 1995-07-25 00:00:00 NULL -60 2009-01-01 +6 Some Some Some 1998-11-04 9487.6152 9487.6152 0.06 NULL 0.06 0.00 0.06 0.00 1998-11-05 00:00:00 NULL 46 2009-12-31 +7 Some Some Some 1996-01-24 12613.136199999999 12613.136199999999 0.04 NULL 0.04 0.00 0.04 0.00 1996-01-29 00:00:00 NULL 38 2009-01-01 +8 Some Some Some 1994-01-17 0.0 0.0 0.08 0.08 0.08 0.08 0.08 0.08 1994-01-14 00:00:00 NULL -44 2009-01-01 +8 Some Some Some 1996-02-03 11978.640000000001 11978.640000000001 0.02 0.02 0.02 0.02 0.02 0.02 1996-01-31 00:00:00 NULL -34 2009-01-01 +9 Some Some Some 1996-02-11 10666.6272 10666.6272 0.08 0.08 0.08 0.08 0.08 0.08 1996-02-19 00:00:00 NULL -12 2009-01-01 +NULL Huge number NULL NULL NULL 0.0 0.0 NULL NULL NULL 0.00 NULL 0.00 NULL NULL NULL 2009-12-31 PREHOOK: query: EXPLAIN VECTORIZATION DETAIL SELECT L_QUANTITY as Quantity, @@ -492,7 +453,6 @@ SELECT IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11, IF(L_SUPPKEY % 500 > 100, DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12 FROM lineitem_test -ORDER BY Quantity 
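[Review note] The recurring flips from inputFormatFeatureSupport: [] / featureSupportInUse: [] to [DECIMAL_64] across these plans mean the vectorized ORC reader now advertises DECIMAL_64 support and the planner takes it up: short decimals such as l_tax:decimal(10,2) are carried as scaled 64-bit longs (shown as decimal(10,2)/DECIMAL_64 in the vectorization schema), and expressions that still need the generic decimal representation get an explicit ConvertDecimal64ToDecimal step into a scratch decimal column, as in the selectExpressions hunks nearby. A rough sketch of the encoding itself, with hypothetical helper names (this is not Hive's HiveDecimalWritable/Decimal64ColumnVector code):

    // Hypothetical sketch: a decimal(10,2) value as a scaled long and back.
    import java.math.BigDecimal;

    public class Decimal64Sketch {
        // 0.05 at scale 2 becomes the long 5
        static long toDecimal64(BigDecimal v, int scale) {
            return v.movePointRight(scale).longValueExact();
        }

        // the long 5 at scale 2 becomes 0.05 again
        static BigDecimal fromDecimal64(long scaled, int scale) {
            return BigDecimal.valueOf(scaled, scale);
        }

        public static void main(String[] args) {
            long tax = toDecimal64(new BigDecimal("0.05"), 2);
            System.out.println(tax);                   // 5
            System.out.println(fromDecimal64(tax, 2)); // 0.05
        }
    }

Any decimal with precision at most 18 fits a signed 64-bit long this way, which is what makes the long-backed column vector viable for these columns and avoids materializing full decimal objects on the fast path.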
PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL SELECT @@ -534,7 +494,6 @@ SELECT IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11, IF(L_SUPPKEY % 500 > 100, DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12 FROM lineitem_test -ORDER BY Quantity POSTHOOK: type: QUERY Explain PLAN VECTORIZATION: @@ -549,9 +508,6 @@ STAGE PLANS: Stage: Stage-1 Tez #### A masked pattern was here #### - Edges: - Reducer 2 <- Map 1 (SIMPLE_EDGE) -#### A masked pattern was here #### Vertices: Map 1 Map Operator Tree: @@ -560,7 +516,7 @@ STAGE PLANS: Statistics: Num rows: 101 Data size: 57327 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:l_orderkey:int, 1:l_partkey:int, 2:l_suppkey:int, 3:l_linenumber:int, 4:l_quantity:int, 5:l_extendedprice:double, 6:l_discount:double, 7:l_tax:decimal(10,2), 8:l_returnflag:char(1), 9:l_linestatus:char(1), 10:l_shipdate:date, 11:l_commitdate:date, 12:l_receiptdate:date, 13:l_shipinstruct:varchar(20), 14:l_shipmode:char(10), 15:l_comment:string, 16:ROW__ID:struct] + vectorizationSchemaColumns: [0:l_orderkey:int, 1:l_partkey:int, 2:l_suppkey:int, 3:l_linenumber:int, 4:l_quantity:int, 5:l_extendedprice:double, 6:l_discount:double, 7:l_tax:decimal(10,2)/DECIMAL_64, 8:l_returnflag:char(1), 9:l_linestatus:char(1), 10:l_shipdate:date, 11:l_commitdate:date, 12:l_receiptdate:date, 13:l_shipinstruct:varchar(20), 14:l_shipmode:char(10), 15:l_comment:string, 16:ROW__ID:struct] Select Operator expressions: l_quantity (type: int), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE ('Huge number') END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE (null) END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN (null) ELSE (null) END (type: string), if((l_shipmode = 'SHIP '), date_add(l_shipdate, 10), date_add(l_shipdate, 5)) (type: date), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0) END (type: double), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0.0D) END (type: double), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), null, l_tax) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, null) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(12,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(12,2)), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(10,2)), if((l_partkey > 30), CAST( l_receiptdate AS TIMESTAMP), CAST( l_commitdate AS TIMESTAMP)) (type: timestamp), if((l_suppkey > 10000), datediff(l_receiptdate, l_commitdate), null) (type: int), if((l_suppkey > 10000), null, datediff(l_receiptdate, l_commitdate)) (type: int), if(((l_suppkey % 500) > 100), DATE'2009-01-01', DATE'2009-12-31') (type: date) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16 @@ -568,70 +524,35 @@ STAGE PLANS: 
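[Review note] Two related renumberings in this file and in vector_binary_join_groupby.q.out above (the 22 -> 23 shift, and the two extra decimal(10,2) entries at the end of scratchColumnTypeNames backing columns 46/47) appear to share one cause: every planned conversion such as ConvertDecimal64ToDecimal claims a fresh scratch column after the data columns, pushing later expression outputs to higher slots. A toy model of that allocation, with hypothetical names (the real bookkeeping lives in the vectorization context):

    // Hypothetical sketch: scratch columns are appended after the data columns,
    // so adding a conversion shifts every later output column number.
    import java.util.ArrayList;
    import java.util.List;

    public class ScratchColumns {
        private final List<String> typeNames = new ArrayList<>();
        private final int dataColumnCount;

        ScratchColumns(int dataColumnCount) {
            this.dataColumnCount = dataColumnCount;
        }

        // returns the column number assigned to a new scratch column
        int allocate(String typeName) {
            typeNames.add(typeName);
            return dataColumnCount + typeNames.size() - 1;
        }

        public static void main(String[] args) {
            ScratchColumns sc = new ScratchColumns(17); // 16 data columns + ROW__ID
            System.out.println(sc.allocate("decimal(10,2)")); // 17: first scratch slot
            System.out.println(sc.allocate("decimal(10,2)")); // 18: the next conversion shifts again
        }
    }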
className: VectorSelectOperator native: true projectedOutputColumnNums: [4, 22, 24, 25, 26, 27, 28, 30, 31, 32, 33, 34, 35, 38, 40, 43, 44] - selectExpressions: IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 21:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 22:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val Somecol 21:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprStringScalarStringScalar(col 20:boolean, val Many, val Huge number)(children: LongColLessLongScalar(col 4:int, val 100) -> 20:boolean) -> 21:string) -> 22:string) -> 21:string) -> 22:string, IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 23:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 24:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val Somecol 23:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprColumnNull(col 20:boolean, col 21:string, null)(children: LongColLessLongScalar(col 4:int, val 100) -> 20:boolean, ConstantVectorExpression(val Many) -> 21:string) -> 23:string) -> 24:string) -> 23:string) -> 24:string, IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 23:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 25:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val Somecol 23:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprNullNull(null, null) -> 23:string) -> 25:string) -> 23:string) -> 25:string, IfExprLongColumnLongColumn(col 17:boolean, col 18:date, col 19:date)(children: StringGroupColEqualCharScalar(col 14:char(10), val SHIP) -> 17:boolean, VectorUDFDateAddColScalar(col 10:date, val 10) -> 18:date, VectorUDFDateAddColScalar(col 10:date, val 5) -> 19:date) -> 26:date, IfExprDoubleColumnLongScalar(col 17:boolean, col 28:double, val 0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 17:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 27:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 27:double) -> 28:double) -> 27:double, IfExprDoubleColumnDoubleScalar(col 17:boolean, col 29:double, val 0.0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 17:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 28:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 28:double) -> 29:double) -> 28:double, IfExprNullColumn(col 17:boolean, null, col 7)(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 17:boolean, col 7:decimal(10,2)) -> 30:decimal(10,2), IfExprColumnNull(col 18:boolean, col 7:decimal(10,2), null)(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 18:boolean, col 7:decimal(10,2)) -> 31:decimal(10,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: 
CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 32:decimal(12,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 33:decimal(12,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 34:decimal(10,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 35:decimal(10,2), IfExprTimestampColumnColumn(col 19:boolean, col 36:timestampcol 37:timestamp)(children: LongColGreaterLongScalar(col 1:int, val 30) -> 19:boolean, CastDateToTimestamp(col 12:date) -> 36:timestamp, CastDateToTimestamp(col 11:date) -> 37:timestamp) -> 38:timestamp, IfExprColumnNull(col 19:boolean, col 39:int, null)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 19:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 39:int) -> 40:int, IfExprNullColumn(col 41:boolean, null, col 42)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 41:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 42:int) -> 43:int, IfExprLongScalarLongScalar(col 45:boolean, val 14245, val 14609)(children: LongColGreaterLongScalar(col 44:int, val 100)(children: LongColModuloLongScalar(col 2:int, val 500) -> 44:int) -> 45:boolean) -> 44:date + selectExpressions: IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 21:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 22:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val Somecol 21:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprStringScalarStringScalar(col 20:boolean, val Many, val Huge number)(children: LongColLessLongScalar(col 4:int, val 100) -> 20:boolean) -> 21:string) -> 22:string) -> 21:string) -> 22:string, IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 23:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 24:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val Somecol 23:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprColumnNull(col 20:boolean, col 21:string, null)(children: LongColLessLongScalar(col 4:int, val 100) -> 20:boolean, ConstantVectorExpression(val Many) -> 21:string) -> 23:string) -> 24:string) -> 23:string) -> 24:string, IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 23:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 25:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val Somecol 23:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprNullNull(null, null) -> 23:string) -> 25:string) -> 23:string) -> 25:string, 
IfExprLongColumnLongColumn(col 17:boolean, col 18:date, col 19:date)(children: StringGroupColEqualCharScalar(col 14:char(10), val SHIP) -> 17:boolean, VectorUDFDateAddColScalar(col 10:date, val 10) -> 18:date, VectorUDFDateAddColScalar(col 10:date, val 5) -> 19:date) -> 26:date, IfExprDoubleColumnLongScalar(col 17:boolean, col 28:double, val 0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 17:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 27:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 27:double) -> 28:double) -> 27:double, IfExprDoubleColumnDoubleScalar(col 17:boolean, col 29:double, val 0.0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 17:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 28:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 28:double) -> 29:double) -> 28:double, IfExprNullColumn(col 17:boolean, null, col 46)(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 17:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) -> 46:decimal(10,2)) -> 30:decimal(10,2), IfExprColumnNull(col 18:boolean, col 47:decimal(10,2), null)(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 18:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) -> 47:decimal(10,2)) -> 31:decimal(10,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 32:decimal(12,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 33:decimal(12,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 34:decimal(10,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 35:decimal(10,2), IfExprTimestampColumnColumn(col 19:boolean, col 36:timestampcol 37:timestamp)(children: LongColGreaterLongScalar(col 1:int, val 30) -> 19:boolean, CastDateToTimestamp(col 12:date) -> 36:timestamp, CastDateToTimestamp(col 11:date) -> 37:timestamp) -> 38:timestamp, IfExprColumnNull(col 19:boolean, col 39:int, null)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 19:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 39:int) -> 40:int, IfExprNullColumn(col 41:boolean, null, col 42)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 41:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 42:int) -> 43:int, IfExprLongScalarLongScalar(col 45:boolean, val 14245, val 14609)(children: LongColGreaterLongScalar(col 44:int, val 100)(children: LongColModuloLongScalar(col 2:int, val 500) -> 44:int) -> 45:boolean) -> 44:date Statistics: Num rows: 101 Data size: 57327 Basic stats: COMPLETE Column stats: NONE - 
Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + - Reduce Sink Vectorization: - className: VectorReduceSinkObjectHashOperator - keyColumnNums: [4] - native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [22, 24, 25, 26, 27, 28, 30, 31, 32, 33, 34, 35, 38, 40, 43, 44] + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false Statistics: Num rows: 101 Data size: 57327 Basic stats: COMPLETE Column stats: NONE - value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: date), _col5 (type: double), _col6 (type: double), _col7 (type: decimal(10,2)), _col8 (type: decimal(10,2)), _col9 (type: decimal(12,2)), _col10 (type: decimal(12,2)), _col11 (type: decimal(10,2)), _col12 (type: decimal(10,2)), _col13 (type: timestamp), _col14 (type: int), _col15 (type: int), _col16 (type: date) + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: true + allNative: false usesVectorUDFAdaptor: true vectorized: true rowBatchContext: dataColumnCount: 16 includeColumns: [1, 2, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14] - dataColumns: l_orderkey:int, l_partkey:int, l_suppkey:int, l_linenumber:int, l_quantity:int, l_extendedprice:double, l_discount:double, l_tax:decimal(10,2), l_returnflag:char(1), l_linestatus:char(1), l_shipdate:date, l_commitdate:date, l_receiptdate:date, l_shipinstruct:varchar(20), l_shipmode:char(10), l_comment:string - partitionColumnCount: 0 - scratchColumnTypeNames: [bigint, bigint, bigint, bigint, string, string, string, string, string, bigint, double, double, double, decimal(10,2), decimal(10,2), decimal(12,2), decimal(12,2), decimal(10,2), decimal(10,2), timestamp, timestamp, timestamp, bigint, bigint, bigint, bigint, bigint, bigint, bigint] - Reducer 2 - Execution mode: vectorized, llap - Reduce Vectorization: - enabled: true - enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + - allNative: false - usesVectorUDFAdaptor: false - vectorized: true - rowBatchContext: - dataColumnCount: 17 - dataColumns: KEY.reducesinkkey0:int, VALUE._col0:string, VALUE._col1:string, VALUE._col2:string, VALUE._col3:date, VALUE._col4:double, VALUE._col5:double, VALUE._col6:decimal(10,2), VALUE._col7:decimal(10,2), VALUE._col8:decimal(12,2), VALUE._col9:decimal(12,2), VALUE._col10:decimal(10,2), VALUE._col11:decimal(10,2), VALUE._col12:timestamp, VALUE._col13:int, VALUE._col14:int, VALUE._col15:date + dataColumns: l_orderkey:int, l_partkey:int, l_suppkey:int, l_linenumber:int, l_quantity:int, l_extendedprice:double, l_discount:double, l_tax:decimal(10,2)/DECIMAL_64, l_returnflag:char(1), l_linestatus:char(1), 
l_shipdate:date, l_commitdate:date, l_receiptdate:date, l_shipinstruct:varchar(20), l_shipmode:char(10), l_comment:string partitionColumnCount: 0 - scratchColumnTypeNames: [] - Reduce Operator Tree: - Select Operator - expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: date), VALUE._col4 (type: double), VALUE._col5 (type: double), VALUE._col6 (type: decimal(10,2)), VALUE._col7 (type: decimal(10,2)), VALUE._col8 (type: decimal(12,2)), VALUE._col9 (type: decimal(12,2)), VALUE._col10 (type: decimal(10,2)), VALUE._col11 (type: decimal(10,2)), VALUE._col12 (type: timestamp), VALUE._col13 (type: int), VALUE._col14 (type: int), VALUE._col15 (type: date) - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16 - Select Vectorization: - className: VectorSelectOperator - native: true - projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] - Statistics: Num rows: 101 Data size: 57327 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - File Sink Vectorization: - className: VectorFileSinkOperator - native: false - Statistics: Num rows: 101 Data size: 57327 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + scratchColumnTypeNames: [bigint, bigint, bigint, bigint, string, string, string, string, string, bigint, double, double, double, decimal(10,2), decimal(10,2), decimal(12,2), decimal(12,2), decimal(10,2), decimal(10,2), timestamp, timestamp, timestamp, bigint, bigint, bigint, bigint, bigint, bigint, bigint, decimal(10,2), decimal(10,2)] Stage: Stage-0 Fetch Operator @@ -678,7 +599,6 @@ PREHOOK: query: SELECT IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11, IF(L_SUPPKEY % 500 > 100, DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12 FROM lineitem_test -ORDER BY Quantity PREHOOK: type: QUERY PREHOOK: Input: default@lineitem_test #### A masked pattern was here #### @@ -721,112 +641,111 @@ POSTHOOK: query: SELECT IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11, IF(L_SUPPKEY % 500 > 100, DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12 FROM lineitem_test -ORDER BY Quantity POSTHOOK: type: QUERY POSTHOOK: Input: default@lineitem_test #### A masked pattern was here #### quantity quantity_description quantity_description_2 quantity_description_3 expected_date field_1 field_2 field_3 field_4 field_5 field_6 field_7 field_8 field_9 field_10 field_11 field_12 -NULL Huge number NULL NULL NULL 0.0 0.0 NULL NULL NULL 0.00 NULL 0.00 NULL NULL NULL 2009-12-31 -1 Single Single Single 1994-12-06 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-12-15 00:00:00 NULL 3 2009-01-01 1 Single Single Single 1994-01-31 0.0 0.0 0.05 0.05 0.05 0.05 0.05 0.05 1994-01-28 00:00:00 NULL -36 2009-01-01 -2 Two Two Two 1993-12-09 0.0 0.0 0.06 NULL 0.06 0.00 0.06 0.00 1994-01-01 00:00:00 NULL -6 2009-01-01 -2 Two Two Two 1995-08-12 2011.3912000000003 2011.3912000000003 NULL NULL 0.00 0.00 0.00 0.00 1995-08-23 00:00:00 NULL -45 2009-01-01 -3 Some Some Some 1998-07-09 2778.921 2778.921 0.02 NULL 0.02 0.00 0.02 0.00 1998-07-21 00:00:00 NULL 46 2009-12-31 -3 Some Some Some 1998-06-02 5137.6143 5137.6143 0.07 
NULL 0.07 0.00 0.07 0.00 1998-06-02 00:00:00 NULL 60 2009-01-01 -3 Some Some Some 1994-06-11 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1994-06-15 00:00:00 NULL -42 2009-12-31 -4 Some Some Some 1995-08-09 5990.4936 5990.4936 0.03 NULL 0.03 0.00 0.03 0.00 1995-09-03 00:00:00 NULL -28 2009-01-01 -4 Some Some Some 1997-04-27 5669.7732000000005 5669.7732000000005 0.04 NULL 0.04 0.00 0.04 0.00 1997-04-20 00:00:00 NULL 79 2009-01-01 -5 Some Some Some 1996-02-15 6217.103999999999 6217.103999999999 0.02 NULL 0.02 0.00 0.02 0.00 1996-02-13 00:00:00 NULL -42 2009-01-01 -5 Some Some Some 1997-02-25 8116.96 8116.96 NULL NULL 0.00 0.00 0.00 0.00 1997-02-21 00:00:00 NULL 9 2009-01-01 -5 Some Some Some 1993-12-14 0.0 0.0 0.03 0.03 0.03 0.03 0.03 0.03 1993-12-23 00:00:00 NULL -2 2009-01-01 -6 Some Some Some 1998-11-04 9487.6152 9487.6152 0.06 NULL 0.06 0.00 0.06 0.00 1998-11-05 00:00:00 NULL 46 2009-12-31 -6 Some Some Some 1995-07-26 8793.2736 8793.2736 0.03 NULL 0.03 0.00 0.03 0.00 1995-07-25 00:00:00 NULL -60 2009-01-01 -7 Some Some Some 1996-01-24 12613.136199999999 12613.136199999999 0.04 NULL 0.04 0.00 0.04 0.00 1996-01-29 00:00:00 NULL 38 2009-01-01 -8 Some Some Some 1996-02-03 11978.640000000001 11978.640000000001 0.02 0.02 0.02 0.02 0.02 0.02 1996-01-31 00:00:00 NULL -34 2009-01-01 -8 Some Some Some 1994-01-17 0.0 0.0 0.08 0.08 0.08 0.08 0.08 0.08 1994-01-14 00:00:00 NULL -44 2009-01-01 -9 Some Some Some 1996-02-11 10666.6272 10666.6272 0.08 0.08 0.08 0.08 0.08 0.08 1996-02-19 00:00:00 NULL -12 2009-01-01 +1 Single Single Single 1994-12-06 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-12-15 00:00:00 NULL 3 2009-01-01 11 Many Many NULL 1994-03-22 0.0 0.0 0.05 NULL 0.05 0.00 0.05 0.00 1994-03-27 00:00:00 NULL 10 2009-01-01 12 Many Many NULL 1996-05-12 12655.998 12655.998 0.03 0.03 0.03 0.03 0.03 0.03 1996-06-03 00:00:00 NULL 82 2009-01-01 12 Many Many NULL 1997-02-01 12156.034800000001 12156.034800000001 0.05 NULL 0.05 0.00 0.05 0.00 1997-02-22 00:00:00 NULL 1 2009-01-01 -13 Many Many NULL 1998-10-28 17554.68 17554.68 0.07 NULL 0.07 0.00 0.07 0.00 1998-11-06 00:00:00 NULL 53 2009-01-01 13 Many Many NULL 1993-04-06 0.0 0.0 0.02 NULL 0.02 0.00 0.02 0.00 1993-04-08 00:00:00 NULL 4 2009-01-01 13 Many Many NULL 1994-03-08 0.0 0.0 0.06 NULL 0.06 0.00 0.06 0.00 1994-03-26 00:00:00 NULL 41 2009-01-01 +13 Many Many NULL 1998-10-28 17554.68 17554.68 0.07 NULL 0.07 0.00 0.07 0.00 1998-11-06 00:00:00 NULL 53 2009-01-01 14 Many Many NULL 1995-01-04 0.0 0.0 0.02 NULL 0.02 0.00 0.02 0.00 1995-01-27 00:00:00 NULL 66 2009-01-01 15 Many Many NULL 1994-11-05 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1994-11-20 00:00:00 NULL 81 2009-12-31 -17 Many Many NULL 1996-03-18 20321.500799999998 20321.500799999998 NULL NULL 0.00 0.00 0.00 0.00 1996-03-22 00:00:00 NULL 39 2009-01-01 17 Many Many NULL 1994-07-07 0.0 0.0 0.00 0.00 0.00 0.00 0.00 0.00 1994-07-03 00:00:00 NULL -4 2009-01-01 -19 Many Many NULL 1994-02-05 0.0 0.0 0.03 0.03 0.03 0.03 0.03 0.03 1994-02-06 00:00:00 NULL -11 2009-01-01 +17 Many Many NULL 1996-03-18 20321.500799999998 20321.500799999998 NULL NULL 0.00 0.00 0.00 0.00 1996-03-22 00:00:00 NULL 39 2009-01-01 19 Many Many NULL 1993-05-19 0.0 0.0 0.08 0.08 0.08 0.08 0.08 0.08 1993-05-25 00:00:00 NULL 81 2009-01-01 +19 Many Many NULL 1994-02-05 0.0 0.0 0.03 0.03 0.03 0.03 0.03 0.03 1994-02-06 00:00:00 NULL -11 2009-01-01 +2 Two Two Two 1993-12-09 0.0 0.0 0.06 NULL 0.06 0.00 0.06 0.00 1994-01-01 00:00:00 NULL -6 2009-01-01 +2 Two Two Two 1995-08-12 2011.3912000000003 2011.3912000000003 NULL NULL 0.00 0.00 0.00 0.00 1995-08-23 
00:00:00 NULL -45 2009-01-01 20 Many Many NULL 1998-07-02 32042.592 32042.592 0.01 NULL 0.01 0.00 0.01 0.00 1998-07-02 00:00:00 NULL 40 2009-01-01 -21 Many Many NULL 1995-07-11 24640.0518 24640.0518 NULL NULL 0.00 0.00 0.00 0.00 1995-07-31 00:00:00 NULL 78 2009-01-01 21 Many Many NULL 1994-10-05 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-10-26 00:00:00 NULL 38 2009-01-01 -22 Many Many NULL 1998-10-14 28405.0184 28405.0184 0.06 NULL 0.06 0.00 0.06 0.00 1998-10-12 00:00:00 NULL -4 2009-01-01 +21 Many Many NULL 1995-07-11 24640.0518 24640.0518 NULL NULL 0.00 0.00 0.00 0.00 1995-07-31 00:00:00 NULL 78 2009-01-01 22 Many Many NULL 1995-07-22 39353.82 39353.82 0.05 NULL 0.05 0.00 0.05 0.00 1995-07-19 00:00:00 NULL 45 2009-01-01 -23 Many Many NULL 1997-04-24 33946.3785 33946.3785 NULL NULL 0.00 0.00 0.00 0.00 1997-05-06 00:00:00 NULL 81 2009-01-01 +22 Many Many NULL 1998-10-14 28405.0184 28405.0184 0.06 NULL 0.06 0.00 0.06 0.00 1998-10-12 00:00:00 NULL -4 2009-01-01 23 Many Many NULL 1994-07-24 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-07-25 00:00:00 NULL 26 2009-01-01 23 Many Many NULL 1994-10-13 0.0 0.0 0.00 NULL 0.00 0.00 0.00 0.00 1994-10-24 00:00:00 NULL 79 2009-12-31 +23 Many Many NULL 1997-04-24 33946.3785 33946.3785 NULL NULL 0.00 0.00 0.00 0.00 1997-05-06 00:00:00 NULL 81 2009-01-01 24 Many Many NULL 1996-02-26 31762.584 31762.584 0.00 0.00 0.00 0.00 0.00 0.00 1996-03-18 00:00:00 NULL 75 2009-01-01 24 Many Many NULL 1996-04-04 20542.032 20542.032 0.04 NULL 0.04 0.00 0.04 0.00 1996-04-01 00:00:00 NULL 18 2009-12-31 25 Many Many NULL 1995-12-06 27263.995 27263.995 NULL NULL 0.00 0.00 0.00 0.00 1995-12-21 00:00:00 NULL -4 2009-01-01 25 Many Many NULL 1998-04-15 43064.1575 43064.1575 0.07 NULL 0.07 0.00 0.07 0.00 1998-04-11 00:00:00 NULL -11 2009-01-01 -26 Many Many NULL 1996-11-09 39912.433600000004 39912.433600000004 0.04 NULL 0.04 0.00 0.04 0.00 1996-11-20 00:00:00 NULL 31 2009-01-01 -26 Many Many NULL 1995-04-25 0.0 0.0 0.03 NULL 0.03 0.00 0.03 0.00 1995-05-13 00:00:00 NULL 18 2009-01-01 26 Many Many NULL 1993-11-03 0.0 0.0 0.02 0.02 0.02 0.02 0.02 0.02 1993-11-04 00:00:00 NULL -44 2009-01-01 26 Many Many NULL 1994-10-21 0.0 0.0 0.08 NULL 0.08 0.00 0.08 0.00 1994-10-19 00:00:00 NULL 24 2009-01-01 -27 Many Many NULL 1998-06-29 45590.2425 45590.2425 NULL NULL 0.00 0.00 0.00 0.00 1998-06-29 00:00:00 NULL 4 2009-01-01 +26 Many Many NULL 1995-04-25 0.0 0.0 0.03 NULL 0.03 0.00 0.03 0.00 1995-05-13 00:00:00 NULL 18 2009-01-01 +26 Many Many NULL 1996-11-09 39912.433600000004 39912.433600000004 0.04 NULL 0.04 0.00 0.04 0.00 1996-11-20 00:00:00 NULL 31 2009-01-01 27 Many Many NULL 1994-01-26 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-01-23 00:00:00 NULL 62 2009-01-01 +27 Many Many NULL 1998-06-29 45590.2425 45590.2425 NULL NULL 0.00 0.00 0.00 0.00 1998-06-29 00:00:00 NULL 4 2009-01-01 28 Many Many NULL 1993-12-19 0.0 0.0 0.00 0.00 0.00 0.00 0.00 0.00 1994-01-01 00:00:00 NULL -9 2009-01-01 -28 Many Many NULL 1996-04-26 26349.6324 26349.6324 0.06 NULL 0.06 0.00 0.06 0.00 1996-05-16 00:00:00 NULL 47 2009-01-01 28 Many Many NULL 1994-12-29 0.0 0.0 0.07 NULL 0.07 0.00 0.07 0.00 1995-01-16 00:00:00 NULL 83 2009-01-01 28 Many Many NULL 1995-10-28 44866.219999999994 44866.219999999994 0.08 0.08 0.08 0.08 0.08 0.08 1995-10-26 00:00:00 NULL 60 2009-01-01 28 Many Many NULL 1996-02-06 45975.3616 45975.3616 0.02 NULL 0.02 0.00 0.02 0.00 1996-02-28 00:00:00 NULL 66 2009-01-01 28 Many Many NULL 1996-03-26 30855.6612 30855.6612 0.04 NULL 0.04 0.00 0.04 0.00 1996-04-20 00:00:00 NULL 12 2009-12-31 +28 Many Many 
NULL 1996-04-26 26349.6324 26349.6324 0.06 NULL 0.06 0.00 0.06 0.00 1996-05-16 00:00:00 NULL 47 2009-01-01 29 Many Many NULL 1997-01-30 39341.806 39341.806 NULL NULL 0.00 0.00 0.00 0.00 1997-01-27 00:00:00 NULL 0 2009-01-01 -30 Many Many NULL 1998-08-16 44561.46 44561.46 0.06 NULL 0.06 0.00 0.06 0.00 1998-08-14 00:00:00 NULL 34 2009-12-31 -30 Many Many NULL 1996-01-15 29770.173 29770.173 NULL NULL 0.00 0.00 0.00 0.00 1996-01-18 00:00:00 NULL 35 2009-12-31 +3 Some Some Some 1994-06-11 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1994-06-15 00:00:00 NULL -42 2009-12-31 +3 Some Some Some 1998-06-02 5137.6143 5137.6143 0.07 NULL 0.07 0.00 0.07 0.00 1998-06-02 00:00:00 NULL 60 2009-01-01 +3 Some Some Some 1998-07-09 2778.921 2778.921 0.02 NULL 0.02 0.00 0.02 0.00 1998-07-21 00:00:00 NULL 46 2009-12-31 30 Many Many NULL 1994-06-08 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-06-22 00:00:00 NULL 24 2009-01-01 +30 Many Many NULL 1996-01-15 29770.173 29770.173 NULL NULL 0.00 0.00 0.00 0.00 1996-01-18 00:00:00 NULL 35 2009-12-31 +30 Many Many NULL 1998-08-16 44561.46 44561.46 0.06 NULL 0.06 0.00 0.06 0.00 1998-08-14 00:00:00 NULL 34 2009-12-31 31 Many Many NULL 1993-11-03 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1993-11-08 00:00:00 NULL -41 2009-01-01 31 Many Many NULL 1994-02-24 0.0 0.0 0.08 0.08 0.08 0.08 0.08 0.08 1994-02-20 00:00:00 NULL -19 2009-01-01 -32 Many Many NULL 1995-08-19 63313.3312 63313.3312 0.00 NULL 0.00 0.00 0.00 0.00 1995-08-27 00:00:00 NULL -41 2009-01-01 32 Many Many NULL 1993-12-14 0.0 0.0 0.05 NULL 0.05 0.00 0.05 0.00 1993-12-28 00:00:00 NULL -7 2009-12-31 -32 Many Many NULL 1996-10-07 44955.15839999999 44955.15839999999 0.05 NULL 0.05 0.00 0.05 0.00 1996-10-14 00:00:00 NULL -66 2009-12-31 32 Many Many NULL 1994-08-29 0.0 0.0 0.06 NULL 0.06 0.00 0.06 0.00 1994-08-31 00:00:00 NULL 14 2009-01-01 +32 Many Many NULL 1995-08-19 63313.3312 63313.3312 0.00 NULL 0.00 0.00 0.00 0.00 1995-08-27 00:00:00 NULL -41 2009-01-01 32 Many Many NULL 1996-02-04 46146.7488 46146.7488 NULL NULL 0.00 0.00 0.00 0.00 1996-02-03 00:00:00 NULL -4 2009-01-01 +32 Many Many NULL 1996-10-07 44955.15839999999 44955.15839999999 0.05 NULL 0.05 0.00 0.05 0.00 1996-10-14 00:00:00 NULL -66 2009-12-31 33 Many Many NULL 1998-04-17 54174.12 54174.12 0.01 NULL 0.01 0.00 0.01 0.00 1998-04-15 00:00:00 NULL 26 2009-01-01 -34 Many Many NULL 1996-01-27 63982.002400000005 63982.002400000005 NULL NULL 0.00 0.00 0.00 0.00 1996-01-27 00:00:00 NULL 21 2009-01-01 34 Many Many NULL 1995-11-13 60586.5448 60586.5448 0.06 NULL 0.06 0.00 0.06 0.00 1995-11-26 00:00:00 NULL -50 2009-01-01 +34 Many Many NULL 1996-01-27 63982.002400000005 63982.002400000005 NULL NULL 0.00 0.00 0.00 0.00 1996-01-27 00:00:00 NULL 21 2009-01-01 34 Many Many NULL 1998-03-10 56487.763199999994 56487.763199999994 NULL NULL 0.00 0.00 0.00 0.00 1998-03-30 00:00:00 NULL -23 2009-01-01 35 Many Many NULL 1996-01-21 40475.225 40475.225 0.03 0.03 0.03 0.03 0.03 0.03 1996-01-22 00:00:00 NULL -32 2009-01-01 36 Many Many NULL 1996-04-17 41844.6756 41844.6756 0.06 0.06 0.06 0.06 0.06 0.06 1996-04-20 00:00:00 NULL 52 2009-01-01 -37 Many Many NULL 1993-04-23 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1993-04-14 00:00:00 NULL 15 2009-12-31 37 Many Many NULL 1992-05-02 0.0 0.0 0.03 0.03 0.03 0.03 0.03 0.03 1992-05-02 00:00:00 NULL -13 2009-01-01 +37 Many Many NULL 1993-04-23 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1993-04-14 00:00:00 NULL 15 2009-12-31 37 Many Many NULL 1994-02-18 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1994-02-21 00:00:00 NULL -23 2009-01-01 -38 Many Many NULL 1997-02-02 
44694.46 44694.46 0.05 0.05 0.05 0.05 0.05 0.05 1997-02-02 00:00:00 NULL 19 2009-01-01 38 Many Many NULL 1996-02-16 68028.3144 68028.3144 NULL NULL 0.00 0.00 0.00 0.00 1996-02-18 00:00:00 NULL -6 2009-01-01 +38 Many Many NULL 1997-02-02 44694.46 44694.46 0.05 0.05 0.05 0.05 0.05 0.05 1997-02-02 00:00:00 NULL 19 2009-01-01 39 Many Many NULL 1992-07-07 0.0 0.0 0.02 0.02 0.02 0.02 0.02 0.02 1992-07-28 00:00:00 NULL -21 2009-01-01 39 Many Many NULL 1998-02-03 45146.01 45146.01 NULL NULL 0.00 0.00 0.00 0.00 1998-02-18 00:00:00 NULL -48 2009-01-01 +4 Some Some Some 1995-08-09 5990.4936 5990.4936 0.03 NULL 0.03 0.00 0.03 0.00 1995-09-03 00:00:00 NULL -28 2009-01-01 +4 Some Some Some 1997-04-27 5669.7732000000005 5669.7732000000005 0.04 NULL 0.04 0.00 0.04 0.00 1997-04-20 00:00:00 NULL 79 2009-01-01 40 Many Many NULL 1992-07-26 0.0 0.0 0.03 NULL 0.03 0.00 0.03 0.00 1992-08-15 00:00:00 NULL 14 2009-01-01 40 Many Many NULL 1996-12-13 51224.736 51224.736 0.05 NULL 0.05 0.00 0.05 0.00 1997-01-01 00:00:00 NULL 71 2009-01-01 +41 Many Many NULL 1993-11-14 0.0 0.0 0.00 0.00 0.00 0.00 0.00 0.00 1993-11-11 00:00:00 NULL -74 2009-01-01 41 Many Many NULL 1994-02-26 0.0 0.0 0.07 NULL 0.07 0.00 0.07 0.00 1994-03-18 00:00:00 NULL 17 2009-01-01 41 Many Many NULL 1998-07-04 47989.6144 47989.6144 0.08 NULL 0.08 0.00 0.08 0.00 1998-07-06 00:00:00 NULL 9 2009-01-01 -41 Many Many NULL 1993-11-14 0.0 0.0 0.00 0.00 0.00 0.00 0.00 0.00 1993-11-11 00:00:00 NULL -74 2009-01-01 42 Many Many NULL 1994-08-05 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-08-28 00:00:00 NULL 33 2009-12-31 42 Many Many NULL 1996-02-13 68289.9672 68289.9672 0.00 NULL 0.00 0.00 0.00 0.00 1996-02-23 00:00:00 NULL 33 2009-01-01 -43 Many Many NULL 1996-10-22 62727.3207 62727.3207 0.01 NULL 0.01 0.00 0.01 0.00 1996-10-26 00:00:00 NULL -19 2009-12-31 43 Many Many NULL 1992-07-15 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1992-08-02 00:00:00 NULL 27 2009-01-01 -44 Many Many NULL 1997-03-23 60781.124800000005 60781.124800000005 NULL NULL 0.00 0.00 0.00 0.00 1997-04-13 00:00:00 NULL 74 2009-12-31 +43 Many Many NULL 1996-10-22 62727.3207 62727.3207 0.01 NULL 0.01 0.00 0.01 0.00 1996-10-26 00:00:00 NULL -19 2009-12-31 44 Many Many NULL 1995-09-02 75106.658 75106.658 NULL NULL 0.00 0.00 0.00 0.00 1995-09-14 00:00:00 NULL 25 2009-01-01 44 Many Many NULL 1996-10-04 80882.4192 80882.4192 0.02 NULL 0.02 0.00 0.02 0.00 1996-09-30 00:00:00 NULL -48 2009-01-01 44 Many Many NULL 1996-11-19 48941.692800000004 48941.692800000004 0.06 NULL 0.06 0.00 0.06 0.00 1996-12-12 00:00:00 NULL -3 2009-01-01 +44 Many Many NULL 1997-03-23 60781.124800000005 60781.124800000005 NULL NULL 0.00 0.00 0.00 0.00 1997-04-13 00:00:00 NULL 74 2009-12-31 45 Many Many NULL 1994-02-07 0.0 0.0 0.00 NULL 0.00 0.00 0.00 0.00 1994-02-23 00:00:00 NULL 50 2009-01-01 45 Many Many NULL 1998-03-05 61489.35 61489.35 NULL NULL 0.00 0.00 0.00 0.00 1998-03-24 00:00:00 NULL 4 2009-01-01 46 Many Many NULL 1996-01-20 73475.892 73475.892 0.07 NULL 0.07 0.00 0.07 0.00 1996-02-03 00:00:00 NULL -53 2009-01-01 46 Many Many NULL 1996-10-01 77781.4092 77781.4092 NULL NULL 0.00 0.00 0.00 0.00 1996-10-26 00:00:00 NULL -54 2009-01-01 -46 Many Many NULL 1998-08-18 84565.5168 84565.5168 0.05 NULL 0.05 0.00 0.05 0.00 1998-08-29 00:00:00 NULL 52 2009-01-01 46 Many Many NULL 1998-07-01 56583.5144 56583.5144 0.05 NULL 0.05 0.00 0.05 0.00 1998-07-05 00:00:00 NULL 28 2009-01-01 +46 Many Many NULL 1998-08-18 84565.5168 84565.5168 0.05 NULL 0.05 0.00 0.05 0.00 1998-08-29 00:00:00 NULL 52 2009-01-01 48 Many Many NULL 1994-08-22 0.0 0.0 
0.07 NULL 0.07 0.00 0.07 0.00 1994-09-08 00:00:00 NULL 28 2009-01-01 49 Many Many NULL 1993-11-14 0.0 0.0 0.00 0.00 0.00 0.00 0.00 0.00 1993-11-24 00:00:00 NULL -26 2009-12-31 +5 Some Some Some 1993-12-14 0.0 0.0 0.03 0.03 0.03 0.03 0.03 0.03 1993-12-23 00:00:00 NULL -2 2009-01-01 +5 Some Some Some 1996-02-15 6217.103999999999 6217.103999999999 0.02 NULL 0.02 0.00 0.02 0.00 1996-02-13 00:00:00 NULL -42 2009-01-01 +5 Some Some Some 1997-02-25 8116.96 8116.96 NULL NULL 0.00 0.00 0.00 0.00 1997-02-21 00:00:00 NULL 9 2009-01-01 50 Many Many NULL 1994-08-13 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-08-26 00:00:00 NULL -48 2009-12-31 +6 Some Some Some 1995-07-26 8793.2736 8793.2736 0.03 NULL 0.03 0.00 0.03 0.00 1995-07-25 00:00:00 NULL -60 2009-01-01 +6 Some Some Some 1998-11-04 9487.6152 9487.6152 0.06 NULL 0.06 0.00 0.06 0.00 1998-11-05 00:00:00 NULL 46 2009-12-31 +7 Some Some Some 1996-01-24 12613.136199999999 12613.136199999999 0.04 NULL 0.04 0.00 0.04 0.00 1996-01-29 00:00:00 NULL 38 2009-01-01 +8 Some Some Some 1994-01-17 0.0 0.0 0.08 0.08 0.08 0.08 0.08 0.08 1994-01-14 00:00:00 NULL -44 2009-01-01 +8 Some Some Some 1996-02-03 11978.640000000001 11978.640000000001 0.02 0.02 0.02 0.02 0.02 0.02 1996-01-31 00:00:00 NULL -34 2009-01-01 +9 Some Some Some 1996-02-11 10666.6272 10666.6272 0.08 0.08 0.08 0.08 0.08 0.08 1996-02-19 00:00:00 NULL -12 2009-01-01 +NULL Huge number NULL NULL NULL 0.0 0.0 NULL NULL NULL 0.00 NULL 0.00 NULL NULL NULL 2009-12-31 PREHOOK: query: EXPLAIN VECTORIZATION DETAIL SELECT L_QUANTITY as Quantity, @@ -867,7 +786,6 @@ SELECT IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11, IF(L_SUPPKEY % 500 > 100, DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12 FROM lineitem_test -ORDER BY Quantity PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL SELECT @@ -909,7 +827,6 @@ SELECT IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11, IF(L_SUPPKEY % 500 > 100, DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12 FROM lineitem_test -ORDER BY Quantity POSTHOOK: type: QUERY Explain PLAN VECTORIZATION: @@ -924,9 +841,6 @@ STAGE PLANS: Stage: Stage-1 Tez #### A masked pattern was here #### - Edges: - Reducer 2 <- Map 1 (SIMPLE_EDGE) -#### A masked pattern was here #### Vertices: Map 1 Map Operator Tree: @@ -935,7 +849,7 @@ STAGE PLANS: Statistics: Num rows: 101 Data size: 57327 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:l_orderkey:int, 1:l_partkey:int, 2:l_suppkey:int, 3:l_linenumber:int, 4:l_quantity:int, 5:l_extendedprice:double, 6:l_discount:double, 7:l_tax:decimal(10,2), 8:l_returnflag:char(1), 9:l_linestatus:char(1), 10:l_shipdate:date, 11:l_commitdate:date, 12:l_receiptdate:date, 13:l_shipinstruct:varchar(20), 14:l_shipmode:char(10), 15:l_comment:string, 16:ROW__ID:struct] + vectorizationSchemaColumns: [0:l_orderkey:int, 1:l_partkey:int, 2:l_suppkey:int, 3:l_linenumber:int, 4:l_quantity:int, 5:l_extendedprice:double, 6:l_discount:double, 7:l_tax:decimal(10,2)/DECIMAL_64, 8:l_returnflag:char(1), 9:l_linestatus:char(1), 10:l_shipdate:date, 11:l_commitdate:date, 12:l_receiptdate:date, 13:l_shipinstruct:varchar(20), 14:l_shipmode:char(10), 15:l_comment:string, 16:ROW__ID:struct] Select Operator expressions: l_quantity (type: int), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE ('Huge number') 
END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE (null) END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN (null) ELSE (null) END (type: string), if((l_shipmode = 'SHIP '), date_add(l_shipdate, 10), date_add(l_shipdate, 5)) (type: date), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0) END (type: double), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0.0D) END (type: double), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), null, l_tax) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, null) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(12,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(12,2)), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(10,2)), if((l_partkey > 30), CAST( l_receiptdate AS TIMESTAMP), CAST( l_commitdate AS TIMESTAMP)) (type: timestamp), if((l_suppkey > 10000), datediff(l_receiptdate, l_commitdate), null) (type: int), if((l_suppkey > 10000), null, datediff(l_receiptdate, l_commitdate)) (type: int), if(((l_suppkey % 500) > 100), DATE'2009-01-01', DATE'2009-12-31') (type: date) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16 @@ -943,70 +857,35 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [4, 27, 38, 48, 52, 54, 60, 63, 65, 67, 68, 69, 70, 73, 76, 79, 80] - selectExpressions: IfExprColumnCondExpr(col 17:boolean, col 18:stringcol 26:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, ConstantVectorExpression(val Single) -> 18:string, IfExprColumnCondExpr(col 19:boolean, col 20:stringcol 25:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 19:boolean, ConstantVectorExpression(val Two) -> 20:string, IfExprColumnCondExpr(col 21:boolean, col 22:stringcol 24:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 21:boolean, ConstantVectorExpression(val Some) -> 22:string, IfExprStringScalarStringScalar(col 23:boolean, val Many, val Huge number)(children: LongColLessLongScalar(col 4:int, val 100) -> 23:boolean) -> 24:string) -> 25:string) -> 26:string) -> 27:string, IfExprColumnCondExpr(col 23:boolean, col 28:stringcol 37:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 23:boolean, ConstantVectorExpression(val Single) -> 28:string, IfExprColumnCondExpr(col 29:boolean, col 30:stringcol 36:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 29:boolean, ConstantVectorExpression(val Two) -> 30:string, IfExprColumnCondExpr(col 31:boolean, col 32:stringcol 35:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 31:boolean, ConstantVectorExpression(val Some) -> 32:string, IfExprColumnNull(col 33:boolean, col 34:string, null)(children: LongColLessLongScalar(col 4:int, val 100) -> 33:boolean, ConstantVectorExpression(val Many) -> 34:string) -> 35:string) -> 36:string) -> 37:string) -> 38:string, IfExprColumnCondExpr(col 39:boolean, col 40:stringcol 47:string)(children: LongColEqualLongScalar(col 
4:int, val 1) -> 39:boolean, ConstantVectorExpression(val Single) -> 40:string, IfExprColumnCondExpr(col 41:boolean, col 42:stringcol 46:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 41:boolean, ConstantVectorExpression(val Two) -> 42:string, IfExprColumnCondExpr(col 43:boolean, col 44:stringcol 45:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 43:boolean, ConstantVectorExpression(val Some) -> 44:string, IfExprNullNull(null, null) -> 45:string) -> 46:string) -> 47:string) -> 48:string, IfExprCondExprCondExpr(col 49:boolean, col 50:datecol 51:date)(children: StringGroupColEqualCharScalar(col 14:char(10), val SHIP) -> 49:boolean, VectorUDFDateAddColScalar(col 10:date, val 10) -> 50:date, VectorUDFDateAddColScalar(col 10:date, val 5) -> 51:date) -> 52:date, IfExprDoubleColumnLongScalar(col 57:boolean, col 58:double, val 0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 57:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 54:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 54:double) -> 58:double) -> 54:double, IfExprCondExprColumn(col 57:boolean, col 59:double, col 58:double)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 57:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 58:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 58:double) -> 59:double, ConstantVectorExpression(val 0.0) -> 58:double) -> 60:double, IfExprNullColumn(col 62:boolean, null, col 7)(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 62:boolean, col 7:decimal(10,2)) -> 63:decimal(10,2), IfExprColumnNull(col 64:boolean, col 7:decimal(10,2), null)(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 64:boolean, col 7:decimal(10,2)) -> 65:decimal(10,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 67:decimal(12,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 68:decimal(12,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 69:decimal(10,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 70:decimal(10,2), IfExprCondExprCondExpr(col 66:boolean, col 71:timestampcol 72:timestamp)(children: LongColGreaterLongScalar(col 1:int, val 30) -> 66:boolean, CastDateToTimestamp(col 12:date) -> 71:timestamp, CastDateToTimestamp(col 11:date) -> 72:timestamp) -> 73:timestamp, IfExprCondExprNull(col 74:boolean, col 75:int, null)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 74:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 75:int) -> 76:int, IfExprNullCondExpr(col 77:boolean, null, col 
78:int)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 77:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 78:int) -> 79:int, IfExprLongScalarLongScalar(col 81:boolean, val 14245, val 14609)(children: LongColGreaterLongScalar(col 80:int, val 100)(children: LongColModuloLongScalar(col 2:int, val 500) -> 80:int) -> 81:boolean) -> 80:date + selectExpressions: IfExprColumnCondExpr(col 17:boolean, col 18:stringcol 26:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, ConstantVectorExpression(val Single) -> 18:string, IfExprColumnCondExpr(col 19:boolean, col 20:stringcol 25:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 19:boolean, ConstantVectorExpression(val Two) -> 20:string, IfExprColumnCondExpr(col 21:boolean, col 22:stringcol 24:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 21:boolean, ConstantVectorExpression(val Some) -> 22:string, IfExprStringScalarStringScalar(col 23:boolean, val Many, val Huge number)(children: LongColLessLongScalar(col 4:int, val 100) -> 23:boolean) -> 24:string) -> 25:string) -> 26:string) -> 27:string, IfExprColumnCondExpr(col 23:boolean, col 28:stringcol 37:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 23:boolean, ConstantVectorExpression(val Single) -> 28:string, IfExprColumnCondExpr(col 29:boolean, col 30:stringcol 36:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 29:boolean, ConstantVectorExpression(val Two) -> 30:string, IfExprColumnCondExpr(col 31:boolean, col 32:stringcol 35:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 31:boolean, ConstantVectorExpression(val Some) -> 32:string, IfExprColumnNull(col 33:boolean, col 34:string, null)(children: LongColLessLongScalar(col 4:int, val 100) -> 33:boolean, ConstantVectorExpression(val Many) -> 34:string) -> 35:string) -> 36:string) -> 37:string) -> 38:string, IfExprColumnCondExpr(col 39:boolean, col 40:stringcol 47:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 39:boolean, ConstantVectorExpression(val Single) -> 40:string, IfExprColumnCondExpr(col 41:boolean, col 42:stringcol 46:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 41:boolean, ConstantVectorExpression(val Two) -> 42:string, IfExprColumnCondExpr(col 43:boolean, col 44:stringcol 45:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 43:boolean, ConstantVectorExpression(val Some) -> 44:string, IfExprNullNull(null, null) -> 45:string) -> 46:string) -> 47:string) -> 48:string, IfExprCondExprCondExpr(col 49:boolean, col 50:datecol 51:date)(children: StringGroupColEqualCharScalar(col 14:char(10), val SHIP) -> 49:boolean, VectorUDFDateAddColScalar(col 10:date, val 10) -> 50:date, VectorUDFDateAddColScalar(col 10:date, val 5) -> 51:date) -> 52:date, IfExprDoubleColumnLongScalar(col 57:boolean, col 58:double, val 0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 57:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 54:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 54:double) -> 58:double) -> 54:double, IfExprCondExprColumn(col 57:boolean, col 59:double, col 58:double)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 57:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 58:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 58:double) -> 59:double, ConstantVectorExpression(val 0.0) -> 58:double) -> 60:double, IfExprNullColumn(col 62:boolean, null, col 82)(children: StringGroupColEqualStringScalar(col 
61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 62:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) -> 82:decimal(10,2)) -> 63:decimal(10,2), IfExprColumnNull(col 64:boolean, col 83:decimal(10,2), null)(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 64:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) -> 83:decimal(10,2)) -> 65:decimal(10,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 67:decimal(12,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 68:decimal(12,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 69:decimal(10,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 70:decimal(10,2), IfExprCondExprCondExpr(col 66:boolean, col 71:timestampcol 72:timestamp)(children: LongColGreaterLongScalar(col 1:int, val 30) -> 66:boolean, CastDateToTimestamp(col 12:date) -> 71:timestamp, CastDateToTimestamp(col 11:date) -> 72:timestamp) -> 73:timestamp, IfExprCondExprNull(col 74:boolean, col 75:int, null)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 74:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 75:int) -> 76:int, IfExprNullCondExpr(col 77:boolean, null, col 78:int)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 77:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 78:int) -> 79:int, IfExprLongScalarLongScalar(col 81:boolean, val 14245, val 14609)(children: LongColGreaterLongScalar(col 80:int, val 100)(children: LongColModuloLongScalar(col 2:int, val 500) -> 80:int) -> 81:boolean) -> 80:date Statistics: Num rows: 101 Data size: 57327 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + - Reduce Sink Vectorization: - className: VectorReduceSinkObjectHashOperator - keyColumnNums: [4] - native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [27, 38, 48, 52, 54, 60, 63, 65, 67, 68, 69, 70, 73, 76, 79, 80] + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false Statistics: Num rows: 101 Data size: 57327 Basic stats: COMPLETE Column stats: NONE - value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: date), _col5 (type: double), _col6 (type: double), _col7 (type: decimal(10,2)), _col8 (type: decimal(10,2)), _col9 (type: decimal(12,2)), _col10 (type: 
decimal(12,2)), _col11 (type: decimal(10,2)), _col12 (type: decimal(10,2)), _col13 (type: timestamp), _col14 (type: int), _col15 (type: int), _col16 (type: date) + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: true + allNative: false usesVectorUDFAdaptor: true vectorized: true rowBatchContext: dataColumnCount: 16 includeColumns: [1, 2, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14] - dataColumns: l_orderkey:int, l_partkey:int, l_suppkey:int, l_linenumber:int, l_quantity:int, l_extendedprice:double, l_discount:double, l_tax:decimal(10,2), l_returnflag:char(1), l_linestatus:char(1), l_shipdate:date, l_commitdate:date, l_receiptdate:date, l_shipinstruct:varchar(20), l_shipmode:char(10), l_comment:string - partitionColumnCount: 0 - scratchColumnTypeNames: [bigint, string, bigint, string, bigint, string, bigint, string, string, string, string, string, bigint, string, bigint, string, bigint, string, string, string, string, string, bigint, string, bigint, string, bigint, string, string, string, string, string, bigint, bigint, bigint, bigint, bigint, double, double, bigint, bigint, double, double, double, string, bigint, decimal(10,2), bigint, decimal(10,2), bigint, decimal(12,2), decimal(12,2), decimal(10,2), decimal(10,2), timestamp, timestamp, timestamp, bigint, bigint, bigint, bigint, bigint, bigint, bigint, bigint] - Reducer 2 - Execution mode: vectorized, llap - Reduce Vectorization: - enabled: true - enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + - allNative: false - usesVectorUDFAdaptor: false - vectorized: true - rowBatchContext: - dataColumnCount: 17 - dataColumns: KEY.reducesinkkey0:int, VALUE._col0:string, VALUE._col1:string, VALUE._col2:string, VALUE._col3:date, VALUE._col4:double, VALUE._col5:double, VALUE._col6:decimal(10,2), VALUE._col7:decimal(10,2), VALUE._col8:decimal(12,2), VALUE._col9:decimal(12,2), VALUE._col10:decimal(10,2), VALUE._col11:decimal(10,2), VALUE._col12:timestamp, VALUE._col13:int, VALUE._col14:int, VALUE._col15:date + dataColumns: l_orderkey:int, l_partkey:int, l_suppkey:int, l_linenumber:int, l_quantity:int, l_extendedprice:double, l_discount:double, l_tax:decimal(10,2)/DECIMAL_64, l_returnflag:char(1), l_linestatus:char(1), l_shipdate:date, l_commitdate:date, l_receiptdate:date, l_shipinstruct:varchar(20), l_shipmode:char(10), l_comment:string partitionColumnCount: 0 - scratchColumnTypeNames: [] - Reduce Operator Tree: - Select Operator - expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: date), VALUE._col4 (type: double), VALUE._col5 (type: double), VALUE._col6 (type: decimal(10,2)), VALUE._col7 (type: decimal(10,2)), VALUE._col8 (type: decimal(12,2)), VALUE._col9 (type: decimal(12,2)), VALUE._col10 (type: decimal(10,2)), VALUE._col11 (type: decimal(10,2)), VALUE._col12 (type: timestamp), VALUE._col13 (type: int), 
VALUE._col14 (type: int), VALUE._col15 (type: date) - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16 - Select Vectorization: - className: VectorSelectOperator - native: true - projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] - Statistics: Num rows: 101 Data size: 57327 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - File Sink Vectorization: - className: VectorFileSinkOperator - native: false - Statistics: Num rows: 101 Data size: 57327 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + scratchColumnTypeNames: [bigint, string, bigint, string, bigint, string, bigint, string, string, string, string, string, bigint, string, bigint, string, bigint, string, string, string, string, string, bigint, string, bigint, string, bigint, string, string, string, string, string, bigint, bigint, bigint, bigint, bigint, double, double, bigint, bigint, double, double, double, string, bigint, decimal(10,2), bigint, decimal(10,2), bigint, decimal(12,2), decimal(12,2), decimal(10,2), decimal(10,2), timestamp, timestamp, timestamp, bigint, bigint, bigint, bigint, bigint, bigint, bigint, bigint, decimal(10,2), decimal(10,2)] Stage: Stage-0 Fetch Operator @@ -1053,7 +932,6 @@ PREHOOK: query: SELECT IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11, IF(L_SUPPKEY % 500 > 100, DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12 FROM lineitem_test -ORDER BY Quantity PREHOOK: type: QUERY PREHOOK: Input: default@lineitem_test #### A masked pattern was here #### @@ -1096,109 +974,108 @@ POSTHOOK: query: SELECT IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11, IF(L_SUPPKEY % 500 > 100, DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12 FROM lineitem_test -ORDER BY Quantity POSTHOOK: type: QUERY POSTHOOK: Input: default@lineitem_test #### A masked pattern was here #### quantity quantity_description quantity_description_2 quantity_description_3 expected_date field_1 field_2 field_3 field_4 field_5 field_6 field_7 field_8 field_9 field_10 field_11 field_12 -NULL Huge number NULL NULL NULL 0.0 0.0 NULL NULL NULL 0.00 NULL 0.00 NULL NULL NULL 2009-12-31 -1 Single Single Single 1994-12-06 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-12-15 00:00:00 NULL 3 2009-01-01 1 Single Single Single 1994-01-31 0.0 0.0 0.05 0.05 0.05 0.05 0.05 0.05 1994-01-28 00:00:00 NULL -36 2009-01-01 -2 Two Two Two 1993-12-09 0.0 0.0 0.06 NULL 0.06 0.00 0.06 0.00 1994-01-01 00:00:00 NULL -6 2009-01-01 -2 Two Two Two 1995-08-12 2011.3912000000003 2011.3912000000003 NULL NULL 0.00 0.00 0.00 0.00 1995-08-23 00:00:00 NULL -45 2009-01-01 -3 Some Some Some 1998-07-09 2778.921 2778.921 0.02 NULL 0.02 0.00 0.02 0.00 1998-07-21 00:00:00 NULL 46 2009-12-31 -3 Some Some Some 1998-06-02 5137.6143 5137.6143 0.07 NULL 0.07 0.00 0.07 0.00 1998-06-02 00:00:00 NULL 60 2009-01-01 -3 Some Some Some 1994-06-11 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1994-06-15 00:00:00 NULL -42 2009-12-31 -4 Some Some Some 1995-08-09 5990.4936 5990.4936 0.03 NULL 0.03 0.00 0.03 0.00 1995-09-03 00:00:00 NULL -28 2009-01-01 -4 Some Some Some 1997-04-27 5669.7732000000005 5669.7732000000005 0.04 NULL 0.04 0.00 0.04 0.00 1997-04-20 00:00:00 
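Note on the decimal(10,2)/DECIMAL_64 annotations and the ConvertDecimal64ToDecimal scratch columns in the plan above: a decimal whose precision fits in 18 digits or fewer can be carried through the vectorized pipeline as a scaled 64-bit long (the representation behind Hive's Decimal64ColumnVector), and is only widened to a full decimal vector for expressions that lack a 64-bit implementation. A minimal illustration of that representation in plain Java, not Hive source; all names below are illustrative only:

import java.math.BigDecimal;
import java.math.RoundingMode;

// Sketch: a decimal(10,2) value carried as a scaled 64-bit long.
public class Decimal64Sketch {
    static final int SCALE = 2; // decimal(10,2)

    // Encode: shift the decimal point right by the scale and keep the integer.
    static long encode(BigDecimal d) {
        return d.movePointRight(SCALE).setScale(0, RoundingMode.HALF_UP).longValueExact();
    }

    // Decode: reattach the scale. This is the kind of widening a
    // ConvertDecimal64ToDecimal-style step performs before handing the value
    // to an expression that only understands full decimal vectors.
    static BigDecimal decode(long scaled) {
        return BigDecimal.valueOf(scaled, SCALE);
    }

    public static void main(String[] args) {
        long scaled = encode(new BigDecimal("0.06")); // an l_tax sample from the output above
        System.out.println(scaled);          // 6
        System.out.println(decode(scaled));  // 0.06
    }
}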
NULL 79 2009-01-01 -5 Some Some Some 1996-02-15 6217.103999999999 6217.103999999999 0.02 NULL 0.02 0.00 0.02 0.00 1996-02-13 00:00:00 NULL -42 2009-01-01 -5 Some Some Some 1997-02-25 8116.96 8116.96 NULL NULL 0.00 0.00 0.00 0.00 1997-02-21 00:00:00 NULL 9 2009-01-01 -5 Some Some Some 1993-12-14 0.0 0.0 0.03 0.03 0.03 0.03 0.03 0.03 1993-12-23 00:00:00 NULL -2 2009-01-01 -6 Some Some Some 1998-11-04 9487.6152 9487.6152 0.06 NULL 0.06 0.00 0.06 0.00 1998-11-05 00:00:00 NULL 46 2009-12-31 -6 Some Some Some 1995-07-26 8793.2736 8793.2736 0.03 NULL 0.03 0.00 0.03 0.00 1995-07-25 00:00:00 NULL -60 2009-01-01 -7 Some Some Some 1996-01-24 12613.136199999999 12613.136199999999 0.04 NULL 0.04 0.00 0.04 0.00 1996-01-29 00:00:00 NULL 38 2009-01-01 -8 Some Some Some 1996-02-03 11978.640000000001 11978.640000000001 0.02 0.02 0.02 0.02 0.02 0.02 1996-01-31 00:00:00 NULL -34 2009-01-01 -8 Some Some Some 1994-01-17 0.0 0.0 0.08 0.08 0.08 0.08 0.08 0.08 1994-01-14 00:00:00 NULL -44 2009-01-01 -9 Some Some Some 1996-02-11 10666.6272 10666.6272 0.08 0.08 0.08 0.08 0.08 0.08 1996-02-19 00:00:00 NULL -12 2009-01-01 +1 Single Single Single 1994-12-06 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-12-15 00:00:00 NULL 3 2009-01-01 11 Many Many NULL 1994-03-22 0.0 0.0 0.05 NULL 0.05 0.00 0.05 0.00 1994-03-27 00:00:00 NULL 10 2009-01-01 12 Many Many NULL 1996-05-12 12655.998 12655.998 0.03 0.03 0.03 0.03 0.03 0.03 1996-06-03 00:00:00 NULL 82 2009-01-01 12 Many Many NULL 1997-02-01 12156.034800000001 12156.034800000001 0.05 NULL 0.05 0.00 0.05 0.00 1997-02-22 00:00:00 NULL 1 2009-01-01 -13 Many Many NULL 1998-10-28 17554.68 17554.68 0.07 NULL 0.07 0.00 0.07 0.00 1998-11-06 00:00:00 NULL 53 2009-01-01 13 Many Many NULL 1993-04-06 0.0 0.0 0.02 NULL 0.02 0.00 0.02 0.00 1993-04-08 00:00:00 NULL 4 2009-01-01 13 Many Many NULL 1994-03-08 0.0 0.0 0.06 NULL 0.06 0.00 0.06 0.00 1994-03-26 00:00:00 NULL 41 2009-01-01 +13 Many Many NULL 1998-10-28 17554.68 17554.68 0.07 NULL 0.07 0.00 0.07 0.00 1998-11-06 00:00:00 NULL 53 2009-01-01 14 Many Many NULL 1995-01-04 0.0 0.0 0.02 NULL 0.02 0.00 0.02 0.00 1995-01-27 00:00:00 NULL 66 2009-01-01 15 Many Many NULL 1994-11-05 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1994-11-20 00:00:00 NULL 81 2009-12-31 -17 Many Many NULL 1996-03-18 20321.500799999998 20321.500799999998 NULL NULL 0.00 0.00 0.00 0.00 1996-03-22 00:00:00 NULL 39 2009-01-01 17 Many Many NULL 1994-07-07 0.0 0.0 0.00 0.00 0.00 0.00 0.00 0.00 1994-07-03 00:00:00 NULL -4 2009-01-01 -19 Many Many NULL 1994-02-05 0.0 0.0 0.03 0.03 0.03 0.03 0.03 0.03 1994-02-06 00:00:00 NULL -11 2009-01-01 +17 Many Many NULL 1996-03-18 20321.500799999998 20321.500799999998 NULL NULL 0.00 0.00 0.00 0.00 1996-03-22 00:00:00 NULL 39 2009-01-01 19 Many Many NULL 1993-05-19 0.0 0.0 0.08 0.08 0.08 0.08 0.08 0.08 1993-05-25 00:00:00 NULL 81 2009-01-01 +19 Many Many NULL 1994-02-05 0.0 0.0 0.03 0.03 0.03 0.03 0.03 0.03 1994-02-06 00:00:00 NULL -11 2009-01-01 +2 Two Two Two 1993-12-09 0.0 0.0 0.06 NULL 0.06 0.00 0.06 0.00 1994-01-01 00:00:00 NULL -6 2009-01-01 +2 Two Two Two 1995-08-12 2011.3912000000003 2011.3912000000003 NULL NULL 0.00 0.00 0.00 0.00 1995-08-23 00:00:00 NULL -45 2009-01-01 20 Many Many NULL 1998-07-02 32042.592 32042.592 0.01 NULL 0.01 0.00 0.01 0.00 1998-07-02 00:00:00 NULL 40 2009-01-01 -21 Many Many NULL 1995-07-11 24640.0518 24640.0518 NULL NULL 0.00 0.00 0.00 0.00 1995-07-31 00:00:00 NULL 78 2009-01-01 21 Many Many NULL 1994-10-05 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-10-26 00:00:00 NULL 38 2009-01-01 -22 Many Many NULL 1998-10-14 
28405.0184 28405.0184 0.06 NULL 0.06 0.00 0.06 0.00 1998-10-12 00:00:00 NULL -4 2009-01-01 +21 Many Many NULL 1995-07-11 24640.0518 24640.0518 NULL NULL 0.00 0.00 0.00 0.00 1995-07-31 00:00:00 NULL 78 2009-01-01 22 Many Many NULL 1995-07-22 39353.82 39353.82 0.05 NULL 0.05 0.00 0.05 0.00 1995-07-19 00:00:00 NULL 45 2009-01-01 -23 Many Many NULL 1997-04-24 33946.3785 33946.3785 NULL NULL 0.00 0.00 0.00 0.00 1997-05-06 00:00:00 NULL 81 2009-01-01 +22 Many Many NULL 1998-10-14 28405.0184 28405.0184 0.06 NULL 0.06 0.00 0.06 0.00 1998-10-12 00:00:00 NULL -4 2009-01-01 23 Many Many NULL 1994-07-24 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-07-25 00:00:00 NULL 26 2009-01-01 23 Many Many NULL 1994-10-13 0.0 0.0 0.00 NULL 0.00 0.00 0.00 0.00 1994-10-24 00:00:00 NULL 79 2009-12-31 +23 Many Many NULL 1997-04-24 33946.3785 33946.3785 NULL NULL 0.00 0.00 0.00 0.00 1997-05-06 00:00:00 NULL 81 2009-01-01 24 Many Many NULL 1996-02-26 31762.584 31762.584 0.00 0.00 0.00 0.00 0.00 0.00 1996-03-18 00:00:00 NULL 75 2009-01-01 24 Many Many NULL 1996-04-04 20542.032 20542.032 0.04 NULL 0.04 0.00 0.04 0.00 1996-04-01 00:00:00 NULL 18 2009-12-31 25 Many Many NULL 1995-12-06 27263.995 27263.995 NULL NULL 0.00 0.00 0.00 0.00 1995-12-21 00:00:00 NULL -4 2009-01-01 25 Many Many NULL 1998-04-15 43064.1575 43064.1575 0.07 NULL 0.07 0.00 0.07 0.00 1998-04-11 00:00:00 NULL -11 2009-01-01 -26 Many Many NULL 1996-11-09 39912.433600000004 39912.433600000004 0.04 NULL 0.04 0.00 0.04 0.00 1996-11-20 00:00:00 NULL 31 2009-01-01 -26 Many Many NULL 1995-04-25 0.0 0.0 0.03 NULL 0.03 0.00 0.03 0.00 1995-05-13 00:00:00 NULL 18 2009-01-01 26 Many Many NULL 1993-11-03 0.0 0.0 0.02 0.02 0.02 0.02 0.02 0.02 1993-11-04 00:00:00 NULL -44 2009-01-01 26 Many Many NULL 1994-10-21 0.0 0.0 0.08 NULL 0.08 0.00 0.08 0.00 1994-10-19 00:00:00 NULL 24 2009-01-01 -27 Many Many NULL 1998-06-29 45590.2425 45590.2425 NULL NULL 0.00 0.00 0.00 0.00 1998-06-29 00:00:00 NULL 4 2009-01-01 +26 Many Many NULL 1995-04-25 0.0 0.0 0.03 NULL 0.03 0.00 0.03 0.00 1995-05-13 00:00:00 NULL 18 2009-01-01 +26 Many Many NULL 1996-11-09 39912.433600000004 39912.433600000004 0.04 NULL 0.04 0.00 0.04 0.00 1996-11-20 00:00:00 NULL 31 2009-01-01 27 Many Many NULL 1994-01-26 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-01-23 00:00:00 NULL 62 2009-01-01 +27 Many Many NULL 1998-06-29 45590.2425 45590.2425 NULL NULL 0.00 0.00 0.00 0.00 1998-06-29 00:00:00 NULL 4 2009-01-01 28 Many Many NULL 1993-12-19 0.0 0.0 0.00 0.00 0.00 0.00 0.00 0.00 1994-01-01 00:00:00 NULL -9 2009-01-01 -28 Many Many NULL 1996-04-26 26349.6324 26349.6324 0.06 NULL 0.06 0.00 0.06 0.00 1996-05-16 00:00:00 NULL 47 2009-01-01 28 Many Many NULL 1994-12-29 0.0 0.0 0.07 NULL 0.07 0.00 0.07 0.00 1995-01-16 00:00:00 NULL 83 2009-01-01 28 Many Many NULL 1995-10-28 44866.219999999994 44866.219999999994 0.08 0.08 0.08 0.08 0.08 0.08 1995-10-26 00:00:00 NULL 60 2009-01-01 28 Many Many NULL 1996-02-06 45975.3616 45975.3616 0.02 NULL 0.02 0.00 0.02 0.00 1996-02-28 00:00:00 NULL 66 2009-01-01 28 Many Many NULL 1996-03-26 30855.6612 30855.6612 0.04 NULL 0.04 0.00 0.04 0.00 1996-04-20 00:00:00 NULL 12 2009-12-31 +28 Many Many NULL 1996-04-26 26349.6324 26349.6324 0.06 NULL 0.06 0.00 0.06 0.00 1996-05-16 00:00:00 NULL 47 2009-01-01 29 Many Many NULL 1997-01-30 39341.806 39341.806 NULL NULL 0.00 0.00 0.00 0.00 1997-01-27 00:00:00 NULL 0 2009-01-01 -30 Many Many NULL 1998-08-16 44561.46 44561.46 0.06 NULL 0.06 0.00 0.06 0.00 1998-08-14 00:00:00 NULL 34 2009-12-31 -30 Many Many NULL 1996-01-15 29770.173 29770.173 NULL NULL 0.00 
0.00 0.00 0.00 1996-01-18 00:00:00 NULL 35 2009-12-31 +3 Some Some Some 1994-06-11 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1994-06-15 00:00:00 NULL -42 2009-12-31 +3 Some Some Some 1998-06-02 5137.6143 5137.6143 0.07 NULL 0.07 0.00 0.07 0.00 1998-06-02 00:00:00 NULL 60 2009-01-01 +3 Some Some Some 1998-07-09 2778.921 2778.921 0.02 NULL 0.02 0.00 0.02 0.00 1998-07-21 00:00:00 NULL 46 2009-12-31 30 Many Many NULL 1994-06-08 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-06-22 00:00:00 NULL 24 2009-01-01 +30 Many Many NULL 1996-01-15 29770.173 29770.173 NULL NULL 0.00 0.00 0.00 0.00 1996-01-18 00:00:00 NULL 35 2009-12-31 +30 Many Many NULL 1998-08-16 44561.46 44561.46 0.06 NULL 0.06 0.00 0.06 0.00 1998-08-14 00:00:00 NULL 34 2009-12-31 31 Many Many NULL 1993-11-03 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1993-11-08 00:00:00 NULL -41 2009-01-01 31 Many Many NULL 1994-02-24 0.0 0.0 0.08 0.08 0.08 0.08 0.08 0.08 1994-02-20 00:00:00 NULL -19 2009-01-01 -32 Many Many NULL 1995-08-19 63313.3312 63313.3312 0.00 NULL 0.00 0.00 0.00 0.00 1995-08-27 00:00:00 NULL -41 2009-01-01 32 Many Many NULL 1993-12-14 0.0 0.0 0.05 NULL 0.05 0.00 0.05 0.00 1993-12-28 00:00:00 NULL -7 2009-12-31 -32 Many Many NULL 1996-10-07 44955.15839999999 44955.15839999999 0.05 NULL 0.05 0.00 0.05 0.00 1996-10-14 00:00:00 NULL -66 2009-12-31 32 Many Many NULL 1994-08-29 0.0 0.0 0.06 NULL 0.06 0.00 0.06 0.00 1994-08-31 00:00:00 NULL 14 2009-01-01 +32 Many Many NULL 1995-08-19 63313.3312 63313.3312 0.00 NULL 0.00 0.00 0.00 0.00 1995-08-27 00:00:00 NULL -41 2009-01-01 32 Many Many NULL 1996-02-04 46146.7488 46146.7488 NULL NULL 0.00 0.00 0.00 0.00 1996-02-03 00:00:00 NULL -4 2009-01-01 +32 Many Many NULL 1996-10-07 44955.15839999999 44955.15839999999 0.05 NULL 0.05 0.00 0.05 0.00 1996-10-14 00:00:00 NULL -66 2009-12-31 33 Many Many NULL 1998-04-17 54174.12 54174.12 0.01 NULL 0.01 0.00 0.01 0.00 1998-04-15 00:00:00 NULL 26 2009-01-01 -34 Many Many NULL 1996-01-27 63982.002400000005 63982.002400000005 NULL NULL 0.00 0.00 0.00 0.00 1996-01-27 00:00:00 NULL 21 2009-01-01 34 Many Many NULL 1995-11-13 60586.5448 60586.5448 0.06 NULL 0.06 0.00 0.06 0.00 1995-11-26 00:00:00 NULL -50 2009-01-01 +34 Many Many NULL 1996-01-27 63982.002400000005 63982.002400000005 NULL NULL 0.00 0.00 0.00 0.00 1996-01-27 00:00:00 NULL 21 2009-01-01 34 Many Many NULL 1998-03-10 56487.763199999994 56487.763199999994 NULL NULL 0.00 0.00 0.00 0.00 1998-03-30 00:00:00 NULL -23 2009-01-01 35 Many Many NULL 1996-01-21 40475.225 40475.225 0.03 0.03 0.03 0.03 0.03 0.03 1996-01-22 00:00:00 NULL -32 2009-01-01 36 Many Many NULL 1996-04-17 41844.6756 41844.6756 0.06 0.06 0.06 0.06 0.06 0.06 1996-04-20 00:00:00 NULL 52 2009-01-01 -37 Many Many NULL 1993-04-23 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1993-04-14 00:00:00 NULL 15 2009-12-31 37 Many Many NULL 1992-05-02 0.0 0.0 0.03 0.03 0.03 0.03 0.03 0.03 1992-05-02 00:00:00 NULL -13 2009-01-01 +37 Many Many NULL 1993-04-23 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1993-04-14 00:00:00 NULL 15 2009-12-31 37 Many Many NULL 1994-02-18 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1994-02-21 00:00:00 NULL -23 2009-01-01 -38 Many Many NULL 1997-02-02 44694.46 44694.46 0.05 0.05 0.05 0.05 0.05 0.05 1997-02-02 00:00:00 NULL 19 2009-01-01 38 Many Many NULL 1996-02-16 68028.3144 68028.3144 NULL NULL 0.00 0.00 0.00 0.00 1996-02-18 00:00:00 NULL -6 2009-01-01 +38 Many Many NULL 1997-02-02 44694.46 44694.46 0.05 0.05 0.05 0.05 0.05 0.05 1997-02-02 00:00:00 NULL 19 2009-01-01 39 Many Many NULL 1992-07-07 0.0 0.0 0.02 0.02 0.02 0.02 0.02 0.02 1992-07-28 00:00:00 
NULL -21 2009-01-01 39 Many Many NULL 1998-02-03 45146.01 45146.01 NULL NULL 0.00 0.00 0.00 0.00 1998-02-18 00:00:00 NULL -48 2009-01-01 +4 Some Some Some 1995-08-09 5990.4936 5990.4936 0.03 NULL 0.03 0.00 0.03 0.00 1995-09-03 00:00:00 NULL -28 2009-01-01 +4 Some Some Some 1997-04-27 5669.7732000000005 5669.7732000000005 0.04 NULL 0.04 0.00 0.04 0.00 1997-04-20 00:00:00 NULL 79 2009-01-01 40 Many Many NULL 1992-07-26 0.0 0.0 0.03 NULL 0.03 0.00 0.03 0.00 1992-08-15 00:00:00 NULL 14 2009-01-01 40 Many Many NULL 1996-12-13 51224.736 51224.736 0.05 NULL 0.05 0.00 0.05 0.00 1997-01-01 00:00:00 NULL 71 2009-01-01 +41 Many Many NULL 1993-11-14 0.0 0.0 0.00 0.00 0.00 0.00 0.00 0.00 1993-11-11 00:00:00 NULL -74 2009-01-01 41 Many Many NULL 1994-02-26 0.0 0.0 0.07 NULL 0.07 0.00 0.07 0.00 1994-03-18 00:00:00 NULL 17 2009-01-01 41 Many Many NULL 1998-07-04 47989.6144 47989.6144 0.08 NULL 0.08 0.00 0.08 0.00 1998-07-06 00:00:00 NULL 9 2009-01-01 -41 Many Many NULL 1993-11-14 0.0 0.0 0.00 0.00 0.00 0.00 0.00 0.00 1993-11-11 00:00:00 NULL -74 2009-01-01 42 Many Many NULL 1994-08-05 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-08-28 00:00:00 NULL 33 2009-12-31 42 Many Many NULL 1996-02-13 68289.9672 68289.9672 0.00 NULL 0.00 0.00 0.00 0.00 1996-02-23 00:00:00 NULL 33 2009-01-01 -43 Many Many NULL 1996-10-22 62727.3207 62727.3207 0.01 NULL 0.01 0.00 0.01 0.00 1996-10-26 00:00:00 NULL -19 2009-12-31 43 Many Many NULL 1992-07-15 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1992-08-02 00:00:00 NULL 27 2009-01-01 -44 Many Many NULL 1997-03-23 60781.124800000005 60781.124800000005 NULL NULL 0.00 0.00 0.00 0.00 1997-04-13 00:00:00 NULL 74 2009-12-31 +43 Many Many NULL 1996-10-22 62727.3207 62727.3207 0.01 NULL 0.01 0.00 0.01 0.00 1996-10-26 00:00:00 NULL -19 2009-12-31 44 Many Many NULL 1995-09-02 75106.658 75106.658 NULL NULL 0.00 0.00 0.00 0.00 1995-09-14 00:00:00 NULL 25 2009-01-01 44 Many Many NULL 1996-10-04 80882.4192 80882.4192 0.02 NULL 0.02 0.00 0.02 0.00 1996-09-30 00:00:00 NULL -48 2009-01-01 44 Many Many NULL 1996-11-19 48941.692800000004 48941.692800000004 0.06 NULL 0.06 0.00 0.06 0.00 1996-12-12 00:00:00 NULL -3 2009-01-01 +44 Many Many NULL 1997-03-23 60781.124800000005 60781.124800000005 NULL NULL 0.00 0.00 0.00 0.00 1997-04-13 00:00:00 NULL 74 2009-12-31 45 Many Many NULL 1994-02-07 0.0 0.0 0.00 NULL 0.00 0.00 0.00 0.00 1994-02-23 00:00:00 NULL 50 2009-01-01 45 Many Many NULL 1998-03-05 61489.35 61489.35 NULL NULL 0.00 0.00 0.00 0.00 1998-03-24 00:00:00 NULL 4 2009-01-01 46 Many Many NULL 1996-01-20 73475.892 73475.892 0.07 NULL 0.07 0.00 0.07 0.00 1996-02-03 00:00:00 NULL -53 2009-01-01 46 Many Many NULL 1996-10-01 77781.4092 77781.4092 NULL NULL 0.00 0.00 0.00 0.00 1996-10-26 00:00:00 NULL -54 2009-01-01 -46 Many Many NULL 1998-08-18 84565.5168 84565.5168 0.05 NULL 0.05 0.00 0.05 0.00 1998-08-29 00:00:00 NULL 52 2009-01-01 46 Many Many NULL 1998-07-01 56583.5144 56583.5144 0.05 NULL 0.05 0.00 0.05 0.00 1998-07-05 00:00:00 NULL 28 2009-01-01 +46 Many Many NULL 1998-08-18 84565.5168 84565.5168 0.05 NULL 0.05 0.00 0.05 0.00 1998-08-29 00:00:00 NULL 52 2009-01-01 48 Many Many NULL 1994-08-22 0.0 0.0 0.07 NULL 0.07 0.00 0.07 0.00 1994-09-08 00:00:00 NULL 28 2009-01-01 49 Many Many NULL 1993-11-14 0.0 0.0 0.00 0.00 0.00 0.00 0.00 0.00 1993-11-24 00:00:00 NULL -26 2009-12-31 +5 Some Some Some 1993-12-14 0.0 0.0 0.03 0.03 0.03 0.03 0.03 0.03 1993-12-23 00:00:00 NULL -2 2009-01-01 +5 Some Some Some 1996-02-15 6217.103999999999 6217.103999999999 0.02 NULL 0.02 0.00 0.02 0.00 1996-02-13 00:00:00 NULL -42 
2009-01-01 +5 Some Some Some 1997-02-25 8116.96 8116.96 NULL NULL 0.00 0.00 0.00 0.00 1997-02-21 00:00:00 NULL 9 2009-01-01 50 Many Many NULL 1994-08-13 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-08-26 00:00:00 NULL -48 2009-12-31 +6 Some Some Some 1995-07-26 8793.2736 8793.2736 0.03 NULL 0.03 0.00 0.03 0.00 1995-07-25 00:00:00 NULL -60 2009-01-01 +6 Some Some Some 1998-11-04 9487.6152 9487.6152 0.06 NULL 0.06 0.00 0.06 0.00 1998-11-05 00:00:00 NULL 46 2009-12-31 +7 Some Some Some 1996-01-24 12613.136199999999 12613.136199999999 0.04 NULL 0.04 0.00 0.04 0.00 1996-01-29 00:00:00 NULL 38 2009-01-01 +8 Some Some Some 1994-01-17 0.0 0.0 0.08 0.08 0.08 0.08 0.08 0.08 1994-01-14 00:00:00 NULL -44 2009-01-01 +8 Some Some Some 1996-02-03 11978.640000000001 11978.640000000001 0.02 0.02 0.02 0.02 0.02 0.02 1996-01-31 00:00:00 NULL -34 2009-01-01 +9 Some Some Some 1996-02-11 10666.6272 10666.6272 0.08 0.08 0.08 0.08 0.08 0.08 1996-02-19 00:00:00 NULL -12 2009-01-01 +NULL Huge number NULL NULL NULL 0.0 0.0 NULL NULL NULL 0.00 NULL 0.00 NULL NULL NULL 2009-12-31 diff --git a/ql/src/test/results/clientpositive/llap/vector_case_when_2.q.out b/ql/src/test/results/clientpositive/llap/vector_case_when_2.q.out index b29fd4b..9be5235 100644 --- a/ql/src/test/results/clientpositive/llap/vector_case_when_2.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_case_when_2.q.out @@ -426,8 +426,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: true @@ -711,8 +711,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: true diff --git a/ql/src/test/results/clientpositive/llap/vector_cast_constant.q.out b/ql/src/test/results/clientpositive/llap/vector_cast_constant.q.out index bbaa05c..f801856 100644 --- a/ql/src/test/results/clientpositive/llap/vector_cast_constant.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_cast_constant.q.out @@ -171,8 +171,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_char_2.q.out b/ql/src/test/results/clientpositive/llap/vector_char_2.q.out index be7c367..73e8060 100644 --- a/ql/src/test/results/clientpositive/llap/vector_char_2.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_char_2.q.out @@ -132,8 +132,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ 
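The row reshuffling in the result hunks above (for example the "+3 Some ..." rows now landing between "29 Many ..." and "30 Many ..." rows) is consistent with dropping ORDER BY Quantity from the query: without an explicit ordering, the golden file appears to be normalized by sorting result lines as plain strings, so quantities compare character by character rather than numerically. A small sketch of that effect, assuming a plain string sort:

import java.util.Arrays;

// Sketch only (not Hive test-harness code): string-sorting result rows
// interleaves multi-digit and single-digit quantities, since '2' < '3'
// makes "29 ..." sort before "3 ...".
public class LexicalSortSketch {
    public static void main(String[] args) {
        String[] rows = {"3 Some", "29 Many", "30 Many", "4 Some"};
        Arrays.sort(rows); // plain String ordering
        System.out.println(Arrays.toString(rows)); // [29 Many, 3 Some, 30 Many, 4 Some]
    }
}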
-328,8 +328,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_char_4.q.out b/ql/src/test/results/clientpositive/llap/vector_char_4.q.out index ca4acf1..a418e7a 100644 --- a/ql/src/test/results/clientpositive/llap/vector_char_4.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_char_4.q.out @@ -174,8 +174,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_char_mapjoin1.q.out b/ql/src/test/results/clientpositive/llap/vector_char_mapjoin1.q.out index 72cd1d3..7aa82d0 100644 --- a/ql/src/test/results/clientpositive/llap/vector_char_mapjoin1.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_char_mapjoin1.q.out @@ -195,8 +195,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -238,8 +238,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -349,8 +349,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -405,8 +405,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -519,8 +519,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -575,8 +575,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: 
[DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_char_simple.q.out b/ql/src/test/results/clientpositive/llap/vector_char_simple.q.out index 696359b..c283674 100644 --- a/ql/src/test/results/clientpositive/llap/vector_char_simple.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_char_simple.q.out @@ -75,8 +75,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -161,8 +161,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -259,8 +259,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_char_varchar_1.q.out b/ql/src/test/results/clientpositive/llap/vector_char_varchar_1.q.out index 1a32227..5a23539 100644 --- a/ql/src/test/results/clientpositive/llap/vector_char_varchar_1.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_char_varchar_1.q.out @@ -83,8 +83,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: true @@ -231,8 +230,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: true diff --git a/ql/src/test/results/clientpositive/llap/vector_coalesce.q.out b/ql/src/test/results/clientpositive/llap/vector_coalesce.q.out index 339df62..bc00c98 100644 --- a/ql/src/test/results/clientpositive/llap/vector_coalesce.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_coalesce.q.out @@ -46,8 +46,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -154,8 +154,8 @@ STAGE PLANS: Map Vectorization: enabled: true 
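The vector_char_varchar_1.q.out hunk above (and the similar text-format hunks that follow) drops the "vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]" line, so featureSupportInUse now retains DECIMAL_64 under LLAP instead of being emptied. Conceptually, the planner keeps whatever the input format advertises minus whatever the runtime removes; a toy model of that bookkeeping, where the enum and method names are hypothetical and not Hive's actual API:

import java.util.EnumSet;

enum Support { DECIMAL_64 } // illustrative stand-in for the plan's feature flags

class FeatureNegotiationSketch {
    // featureSupportInUse = advertised support minus runtime removals.
    static EnumSet<Support> inUse(EnumSet<Support> advertised, EnumSet<Support> removed) {
        EnumSet<Support> result = EnumSet.copyOf(advertised);
        result.removeAll(removed); // before this patch, LLAP effectively put DECIMAL_64 here
        return result;
    }

    public static void main(String[] args) {
        // Old behavior: advertised [DECIMAL_64], removed [DECIMAL_64] -> in use []
        System.out.println(inUse(EnumSet.of(Support.DECIMAL_64), EnumSet.of(Support.DECIMAL_64)));
        // New behavior: nothing removed -> in use [DECIMAL_64]
        System.out.println(inUse(EnumSet.of(Support.DECIMAL_64), EnumSet.noneOf(Support.class)));
    }
}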
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -262,8 +262,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -346,8 +346,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -449,8 +449,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -531,8 +531,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_coalesce_2.q.out b/ql/src/test/results/clientpositive/llap/vector_coalesce_2.q.out index 451a6e1..f43c9ea 100644 --- a/ql/src/test/results/clientpositive/llap/vector_coalesce_2.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_coalesce_2.q.out @@ -99,8 +99,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -223,8 +223,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -324,8 +324,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -448,8 +448,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + 
inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_coalesce_3.q.out b/ql/src/test/results/clientpositive/llap/vector_coalesce_3.q.out index d8b2ced..d05dd70 100644 --- a/ql/src/test/results/clientpositive/llap/vector_coalesce_3.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_coalesce_3.q.out @@ -135,8 +135,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -180,8 +180,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_coalesce_4.q.out b/ql/src/test/results/clientpositive/llap/vector_coalesce_4.q.out index 409c68c..e609d14 100644 --- a/ql/src/test/results/clientpositive/llap/vector_coalesce_4.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_coalesce_4.q.out @@ -81,8 +81,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_complex_all.q.out b/ql/src/test/results/clientpositive/llap/vector_complex_all.q.out index 87525fe..d5ea64f 100644 --- a/ql/src/test/results/clientpositive/llap/vector_complex_all.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_complex_all.q.out @@ -117,8 +117,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -199,8 +199,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -281,8 +281,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -363,8 +363,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -445,8 +445,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -527,8 +527,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -611,8 +611,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -959,8 +959,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1185,8 +1185,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1328,8 +1328,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_complex_join.q.out b/ql/src/test/results/clientpositive/llap/vector_complex_join.q.out index 589e807..c4f59f0 100644 --- a/ql/src/test/results/clientpositive/llap/vector_complex_join.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_complex_join.q.out @@ -91,8 +91,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -134,8 +134,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + 
inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -270,8 +270,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -314,8 +314,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -413,8 +413,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -457,8 +457,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_count.q.out b/ql/src/test/results/clientpositive/llap/vector_count.q.out index ce35eb8..20a655b 100644 --- a/ql/src/test/results/clientpositive/llap/vector_count.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_count.q.out @@ -271,8 +271,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -366,8 +366,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_count_distinct.q.out b/ql/src/test/results/clientpositive/llap/vector_count_distinct.q.out index 42fcdbd..dd54bd5 100644 --- a/ql/src/test/results/clientpositive/llap/vector_count_distinct.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_count_distinct.q.out @@ -1289,8 +1289,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git 
a/ql/src/test/results/clientpositive/llap/vector_create_struct_table.q.out b/ql/src/test/results/clientpositive/llap/vector_create_struct_table.q.out index c08154e..f312244 100644 --- a/ql/src/test/results/clientpositive/llap/vector_create_struct_table.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_create_struct_table.q.out @@ -79,8 +79,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -191,8 +190,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -303,8 +301,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_data_types.q.out b/ql/src/test/results/clientpositive/llap/vector_data_types.q.out index a709210..a1d18cd 100644 --- a/ql/src/test/results/clientpositive/llap/vector_data_types.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_data_types.q.out @@ -260,8 +260,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -399,8 +399,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true diff --git a/ql/src/test/results/clientpositive/llap/vector_date_1.q.out b/ql/src/test/results/clientpositive/llap/vector_date_1.q.out index 2a77c39..9d96979 100644 --- a/ql/src/test/results/clientpositive/llap/vector_date_1.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_date_1.q.out @@ -139,8 +139,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -304,8 +304,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + 
inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -469,8 +469,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -634,8 +634,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -803,8 +803,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -970,8 +970,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1081,8 +1081,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_1.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_1.q.out index 05c43fb..b0e5787 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_1.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_1.q.out @@ -72,22 +72,22 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(4,2), 1:u:decimal(5,0), 2:v:decimal(10,0), 3:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(4,2)/DECIMAL_64, 1:u:decimal(5,0)/DECIMAL_64, 2:v:decimal(10,0)/DECIMAL_64, 3:ROW__ID:struct] Select Operator expressions: UDFToBoolean(t) (type: boolean) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [4] - selectExpressions: CastDecimalToBoolean(col 0:decimal(4,2)) -> 4:boolean + projectedOutputColumnNums: [5] + selectExpressions: CastDecimalToBoolean(col 4:decimal(4,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(4,2)/DECIMAL_64) -> 4:decimal(4,2)) -> 5:boolean Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: boolean) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [4] + keyColumnNums: [5] 
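The reshaped selectExpressions above show the pattern this patch introduces throughout these plans: when a cast has no DECIMAL_64-aware overload, the planner inserts a ConvertDecimal64ToDecimal child that first widens the scaled-long column into a regular decimal scratch column. A minimal sketch of that widening, using Hive's vector classes (the class and loop are illustrative, not the production VectorExpression, which also handles isRepeating and selection vectors):

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;

public class WidenDecimal64Sketch {
  // Copy a DECIMAL_64 column (unscaled longs plus a scale) into a regular
  // decimal column so that expressions without a DECIMAL_64 overload,
  // e.g. CastDecimalToBoolean, can consume it.
  static void widen(Decimal64ColumnVector in, DecimalColumnVector out, int n) {
    for (int i = 0; i < n; i++) {
      if (!in.noNulls && in.isNull[i]) {
        out.isNull[i] = true;
        out.noNulls = false;
        continue;
      }
      // in.vector[i] holds the unscaled value: 314 at scale 2 means 3.14.
      out.set(i, HiveDecimal.create(
          java.math.BigDecimal.valueOf(in.vector[i], in.scale)));
    }
  }
}
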
native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [] @@ -97,8 +97,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -106,9 +106,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 3 includeColumns: [0] - dataColumns: t:decimal(4,2), u:decimal(5,0), v:decimal(10,0) + dataColumns: t:decimal(4,2)/DECIMAL_64, u:decimal(5,0)/DECIMAL_64, v:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [bigint] + scratchColumnTypeNames: [decimal(4,2), bigint] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -189,22 +189,22 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(4,2), 1:u:decimal(5,0), 2:v:decimal(10,0), 3:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(4,2)/DECIMAL_64, 1:u:decimal(5,0)/DECIMAL_64, 2:v:decimal(10,0)/DECIMAL_64, 3:ROW__ID:struct] Select Operator expressions: UDFToByte(t) (type: tinyint) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [4] - selectExpressions: CastDecimalToLong(col 0:decimal(4,2)) -> 4:tinyint + projectedOutputColumnNums: [5] + selectExpressions: CastDecimalToLong(col 4:decimal(4,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(4,2)/DECIMAL_64) -> 4:decimal(4,2)) -> 5:tinyint Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: tinyint) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [4] + keyColumnNums: [5] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [] @@ -214,8 +214,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -223,9 +223,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 3 includeColumns: [0] - dataColumns: t:decimal(4,2), u:decimal(5,0), v:decimal(10,0) + dataColumns: t:decimal(4,2)/DECIMAL_64, u:decimal(5,0)/DECIMAL_64, v:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [bigint] + scratchColumnTypeNames: [decimal(4,2), bigint] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -306,22 +306,22 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(4,2), 1:u:decimal(5,0), 2:v:decimal(10,0), 
3:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(4,2)/DECIMAL_64, 1:u:decimal(5,0)/DECIMAL_64, 2:v:decimal(10,0)/DECIMAL_64, 3:ROW__ID:struct] Select Operator expressions: UDFToShort(t) (type: smallint) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [4] - selectExpressions: CastDecimalToLong(col 0:decimal(4,2)) -> 4:smallint + projectedOutputColumnNums: [5] + selectExpressions: CastDecimalToLong(col 4:decimal(4,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(4,2)/DECIMAL_64) -> 4:decimal(4,2)) -> 5:smallint Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: smallint) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [4] + keyColumnNums: [5] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [] @@ -331,8 +331,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -340,9 +340,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 3 includeColumns: [0] - dataColumns: t:decimal(4,2), u:decimal(5,0), v:decimal(10,0) + dataColumns: t:decimal(4,2)/DECIMAL_64, u:decimal(5,0)/DECIMAL_64, v:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [bigint] + scratchColumnTypeNames: [decimal(4,2), bigint] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -423,22 +423,22 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(4,2), 1:u:decimal(5,0), 2:v:decimal(10,0), 3:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(4,2)/DECIMAL_64, 1:u:decimal(5,0)/DECIMAL_64, 2:v:decimal(10,0)/DECIMAL_64, 3:ROW__ID:struct] Select Operator expressions: UDFToInteger(t) (type: int) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [4] - selectExpressions: CastDecimalToLong(col 0:decimal(4,2)) -> 4:int + projectedOutputColumnNums: [5] + selectExpressions: CastDecimalToLong(col 4:decimal(4,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(4,2)/DECIMAL_64) -> 4:decimal(4,2)) -> 5:int Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [4] + keyColumnNums: [5] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [] @@ -448,8 +448,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - 
featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -457,9 +457,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 3 includeColumns: [0] - dataColumns: t:decimal(4,2), u:decimal(5,0), v:decimal(10,0) + dataColumns: t:decimal(4,2)/DECIMAL_64, u:decimal(5,0)/DECIMAL_64, v:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [bigint] + scratchColumnTypeNames: [decimal(4,2), bigint] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -540,22 +540,22 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(4,2), 1:u:decimal(5,0), 2:v:decimal(10,0), 3:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(4,2)/DECIMAL_64, 1:u:decimal(5,0)/DECIMAL_64, 2:v:decimal(10,0)/DECIMAL_64, 3:ROW__ID:struct] Select Operator expressions: UDFToLong(t) (type: bigint) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [4] - selectExpressions: CastDecimalToLong(col 0:decimal(4,2)) -> 4:bigint + projectedOutputColumnNums: [5] + selectExpressions: CastDecimalToLong(col 4:decimal(4,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(4,2)/DECIMAL_64) -> 4:decimal(4,2)) -> 5:bigint Statistics: Num rows: 2 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: bigint) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [4] + keyColumnNums: [5] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [] @@ -565,8 +565,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -574,9 +574,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 3 includeColumns: [0] - dataColumns: t:decimal(4,2), u:decimal(5,0), v:decimal(10,0) + dataColumns: t:decimal(4,2)/DECIMAL_64, u:decimal(5,0)/DECIMAL_64, v:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [bigint] + scratchColumnTypeNames: [decimal(4,2), bigint] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -657,22 +657,22 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(4,2), 1:u:decimal(5,0), 2:v:decimal(10,0), 3:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(4,2)/DECIMAL_64, 1:u:decimal(5,0)/DECIMAL_64, 2:v:decimal(10,0)/DECIMAL_64, 3:ROW__ID:struct] Select Operator expressions: UDFToFloat(t) (type: float) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [4] - selectExpressions: CastDecimalToFloat(col 0:decimal(4,2)) -> 4:float + projectedOutputColumnNums: [5] + selectExpressions: 
CastDecimalToFloat(col 4:decimal(4,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(4,2)/DECIMAL_64) -> 4:decimal(4,2)) -> 5:float Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: float) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [4] + keyColumnNums: [5] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [] @@ -682,8 +682,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -691,9 +691,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 3 includeColumns: [0] - dataColumns: t:decimal(4,2), u:decimal(5,0), v:decimal(10,0) + dataColumns: t:decimal(4,2)/DECIMAL_64, u:decimal(5,0)/DECIMAL_64, v:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [double] + scratchColumnTypeNames: [decimal(4,2), double] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -774,22 +774,22 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(4,2), 1:u:decimal(5,0), 2:v:decimal(10,0), 3:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(4,2)/DECIMAL_64, 1:u:decimal(5,0)/DECIMAL_64, 2:v:decimal(10,0)/DECIMAL_64, 3:ROW__ID:struct] Select Operator expressions: UDFToDouble(t) (type: double) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [4] - selectExpressions: CastDecimalToDouble(col 0:decimal(4,2)) -> 4:double + projectedOutputColumnNums: [5] + selectExpressions: CastDecimalToDouble(col 4:decimal(4,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(4,2)/DECIMAL_64) -> 4:decimal(4,2)) -> 5:double Statistics: Num rows: 2 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: double) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [4] + keyColumnNums: [5] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [] @@ -799,8 +799,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -808,9 +808,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 3 includeColumns: [0] - dataColumns: t:decimal(4,2), u:decimal(5,0), v:decimal(10,0) + dataColumns: t:decimal(4,2)/DECIMAL_64, u:decimal(5,0)/DECIMAL_64, v:decimal(10,0)/DECIMAL_64 
partitionColumnCount: 0 - scratchColumnTypeNames: [double] + scratchColumnTypeNames: [decimal(4,2), double] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -891,22 +891,22 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(4,2), 1:u:decimal(5,0), 2:v:decimal(10,0), 3:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(4,2)/DECIMAL_64, 1:u:decimal(5,0)/DECIMAL_64, 2:v:decimal(10,0)/DECIMAL_64, 3:ROW__ID:struct] Select Operator expressions: UDFToString(t) (type: string) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [4] - selectExpressions: CastDecimalToString(col 0:decimal(4,2)) -> 4:string + projectedOutputColumnNums: [5] + selectExpressions: CastDecimalToString(col 4:decimal(4,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(4,2)/DECIMAL_64) -> 4:decimal(4,2)) -> 5:string Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [4] + keyColumnNums: [5] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [] @@ -916,8 +916,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -925,9 +925,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 3 includeColumns: [0] - dataColumns: t:decimal(4,2), u:decimal(5,0), v:decimal(10,0) + dataColumns: t:decimal(4,2)/DECIMAL_64, u:decimal(5,0)/DECIMAL_64, v:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [string] + scratchColumnTypeNames: [decimal(4,2), string] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -1008,22 +1008,22 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(4,2), 1:u:decimal(5,0), 2:v:decimal(10,0), 3:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(4,2)/DECIMAL_64, 1:u:decimal(5,0)/DECIMAL_64, 2:v:decimal(10,0)/DECIMAL_64, 3:ROW__ID:struct] Select Operator expressions: CAST( t AS TIMESTAMP) (type: timestamp) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [4] - selectExpressions: CastDecimalToTimestamp(col 0:decimal(4,2)) -> 4:timestamp + projectedOutputColumnNums: [5] + selectExpressions: CastDecimalToTimestamp(col 4:decimal(4,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(4,2)/DECIMAL_64) -> 4:decimal(4,2)) -> 5:timestamp Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: timestamp) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [4] + keyColumnNums: [5] native: true 
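The widened value needs somewhere to live, which is why each affected plan also grows a decimal entry in scratchColumnTypeNames and the projected output column shifts by one (4 to 5 here). A sketch of the batch layout those rowBatchContext entries describe, assuming the default batch size and the [decimal(4,2), bigint] scratch pair from the integer-family casts above:

import org.apache.hadoop.hive.ql.exec.vector.*;

public class BatchLayoutSketch {
  public static void main(String[] args) {
    int size = VectorizedRowBatch.DEFAULT_SIZE;
    VectorizedRowBatch batch = new VectorizedRowBatch(6, size);
    batch.cols[0] = new Decimal64ColumnVector(size, 4, 2);  // t:decimal(4,2)/DECIMAL_64
    batch.cols[1] = new Decimal64ColumnVector(size, 5, 0);  // u:decimal(5,0)/DECIMAL_64
    batch.cols[2] = new Decimal64ColumnVector(size, 10, 0); // v:decimal(10,0)/DECIMAL_64
    // cols[3] would be the ROW__ID struct column, elided here.
    batch.cols[4] = new DecimalColumnVector(size, 4, 2);    // scratch: widened copy of t
    batch.cols[5] = new LongColumnVector(size);             // scratch: cast result (bigint family)
  }
}
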
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [] @@ -1033,8 +1033,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1042,9 +1042,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 3 includeColumns: [0] - dataColumns: t:decimal(4,2), u:decimal(5,0), v:decimal(10,0) + dataColumns: t:decimal(4,2)/DECIMAL_64, u:decimal(5,0)/DECIMAL_64, v:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [timestamp] + scratchColumnTypeNames: [decimal(4,2), timestamp] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out index 5e835cd..3170625 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out @@ -62,7 +62,7 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:dec:decimal(10,0), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:dec:decimal(10,0)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: dec (type: decimal(10,0)) outputColumnNames: _col0 @@ -86,8 +86,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -95,7 +95,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: dec:decimal(10,0) + dataColumns: dec:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -178,7 +178,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:dec:decimal(10,0), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:dec:decimal(10,0)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: dec (type: decimal(10,0)) outputColumnNames: _col0 @@ -203,8 +203,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -212,7 +211,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: dec:decimal(10,0) + dataColumns: dec:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_2.q.out 
b/ql/src/test/results/clientpositive/llap/vector_decimal_2.q.out index 8e02351..8f0cc4d 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_2.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_2.q.out @@ -50,22 +50,22 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(18,9), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: UDFToBoolean(t) (type: boolean) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [2] - selectExpressions: CastDecimalToBoolean(col 0:decimal(18,9)) -> 2:boolean + projectedOutputColumnNums: [3] + selectExpressions: CastDecimalToBoolean(col 2:decimal(18,9))(children: ConvertDecimal64ToDecimal(col 0:decimal(18,9)/DECIMAL_64) -> 2:decimal(18,9)) -> 3:boolean Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: boolean) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2] + keyColumnNums: [3] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [] @@ -75,8 +75,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -84,9 +84,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: t:decimal(18,9) + dataColumns: t:decimal(18,9)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [bigint] + scratchColumnTypeNames: [decimal(18,9), bigint] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -166,22 +166,22 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(18,9), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: UDFToByte(t) (type: tinyint) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [2] - selectExpressions: CastDecimalToLong(col 0:decimal(18,9)) -> 2:tinyint + projectedOutputColumnNums: [3] + selectExpressions: CastDecimalToLong(col 2:decimal(18,9))(children: ConvertDecimal64ToDecimal(col 0:decimal(18,9)/DECIMAL_64) -> 2:decimal(18,9)) -> 3:tinyint Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: tinyint) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2] + keyColumnNums: [3] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for 
values IS true valueColumnNums: [] @@ -191,8 +191,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -200,9 +200,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: t:decimal(18,9) + dataColumns: t:decimal(18,9)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [bigint] + scratchColumnTypeNames: [decimal(18,9), bigint] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -282,22 +282,22 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(18,9), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: UDFToShort(t) (type: smallint) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [2] - selectExpressions: CastDecimalToLong(col 0:decimal(18,9)) -> 2:smallint + projectedOutputColumnNums: [3] + selectExpressions: CastDecimalToLong(col 2:decimal(18,9))(children: ConvertDecimal64ToDecimal(col 0:decimal(18,9)/DECIMAL_64) -> 2:decimal(18,9)) -> 3:smallint Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: smallint) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2] + keyColumnNums: [3] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [] @@ -307,8 +307,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -316,9 +316,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: t:decimal(18,9) + dataColumns: t:decimal(18,9)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [bigint] + scratchColumnTypeNames: [decimal(18,9), bigint] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -398,22 +398,22 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(18,9), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: UDFToInteger(t) (type: int) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [2] - selectExpressions: CastDecimalToLong(col 0:decimal(18,9)) -> 2:int + projectedOutputColumnNums: [3] + selectExpressions: CastDecimalToLong(col 2:decimal(18,9))(children: ConvertDecimal64ToDecimal(col 0:decimal(18,9)/DECIMAL_64) -> 2:decimal(18,9)) -> 3:int Statistics: 
Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2] + keyColumnNums: [3] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [] @@ -423,8 +423,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -432,9 +432,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: t:decimal(18,9) + dataColumns: t:decimal(18,9)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [bigint] + scratchColumnTypeNames: [decimal(18,9), bigint] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -514,22 +514,22 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(18,9), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: UDFToLong(t) (type: bigint) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [2] - selectExpressions: CastDecimalToLong(col 0:decimal(18,9)) -> 2:bigint + projectedOutputColumnNums: [3] + selectExpressions: CastDecimalToLong(col 2:decimal(18,9))(children: ConvertDecimal64ToDecimal(col 0:decimal(18,9)/DECIMAL_64) -> 2:decimal(18,9)) -> 3:bigint Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: bigint) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2] + keyColumnNums: [3] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [] @@ -539,8 +539,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -548,9 +548,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: t:decimal(18,9) + dataColumns: t:decimal(18,9)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [bigint] + scratchColumnTypeNames: [decimal(18,9), bigint] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -630,22 +630,22 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(18,9), 1:ROW__ID:struct] + vectorizationSchemaColumns: 
[0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: UDFToFloat(t) (type: float) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [2] - selectExpressions: CastDecimalToFloat(col 0:decimal(18,9)) -> 2:float + projectedOutputColumnNums: [3] + selectExpressions: CastDecimalToFloat(col 2:decimal(18,9))(children: ConvertDecimal64ToDecimal(col 0:decimal(18,9)/DECIMAL_64) -> 2:decimal(18,9)) -> 3:float Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: float) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2] + keyColumnNums: [3] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [] @@ -655,8 +655,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -664,9 +664,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: t:decimal(18,9) + dataColumns: t:decimal(18,9)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [double] + scratchColumnTypeNames: [decimal(18,9), double] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -746,22 +746,22 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(18,9), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: UDFToDouble(t) (type: double) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [2] - selectExpressions: CastDecimalToDouble(col 0:decimal(18,9)) -> 2:double + projectedOutputColumnNums: [3] + selectExpressions: CastDecimalToDouble(col 2:decimal(18,9))(children: ConvertDecimal64ToDecimal(col 0:decimal(18,9)/DECIMAL_64) -> 2:decimal(18,9)) -> 3:double Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: double) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2] + keyColumnNums: [3] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [] @@ -771,8 +771,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -780,9 +780,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 
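All of the decimal types in these files, decimal(4,2), decimal(10,0) and decimal(18,9), pick up the /DECIMAL_64 annotation because their unscaled values fit in a signed 64-bit long; Hive records that as a physical variation of the logical type. A sketch of the eligibility rule, with the limit mirroring ORC's TypeDescription.MAX_DECIMAL64_PRECISION (the helper itself is illustrative):

import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;

public class Decimal64Eligibility {
  static final int MAX_DECIMAL64_PRECISION = 18; // mirrors org.apache.orc.TypeDescription

  // decimal(p,s) can be stored as a scaled long iff p <= 18, so every
  // decimal column in these q.out files qualifies; decimal(19,0) would not.
  static DataTypePhysicalVariation variationFor(int precision) {
    return precision <= MAX_DECIMAL64_PRECISION
        ? DataTypePhysicalVariation.DECIMAL_64
        : DataTypePhysicalVariation.NONE;
  }
}
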
includeColumns: [0] - dataColumns: t:decimal(18,9) + dataColumns: t:decimal(18,9)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [double] + scratchColumnTypeNames: [decimal(18,9), double] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -862,22 +862,22 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(18,9), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: UDFToString(t) (type: string) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [2] - selectExpressions: CastDecimalToString(col 0:decimal(18,9)) -> 2:string + projectedOutputColumnNums: [3] + selectExpressions: CastDecimalToString(col 2:decimal(18,9))(children: ConvertDecimal64ToDecimal(col 0:decimal(18,9)/DECIMAL_64) -> 2:decimal(18,9)) -> 3:string Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2] + keyColumnNums: [3] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [] @@ -887,8 +887,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -896,9 +896,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: t:decimal(18,9) + dataColumns: t:decimal(18,9)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [string] + scratchColumnTypeNames: [decimal(18,9), string] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -989,22 +989,22 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(18,9), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: UDFToBoolean(t) (type: boolean) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [2] - selectExpressions: CastDecimalToBoolean(col 0:decimal(18,9)) -> 2:boolean + projectedOutputColumnNums: [3] + selectExpressions: CastDecimalToBoolean(col 2:decimal(18,9))(children: ConvertDecimal64ToDecimal(col 0:decimal(18,9)/DECIMAL_64) -> 2:decimal(18,9)) -> 3:boolean Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: boolean) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2] + keyColumnNums: [3] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe 
for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [] @@ -1014,8 +1014,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1023,9 +1023,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: t:decimal(18,9) + dataColumns: t:decimal(18,9)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [bigint] + scratchColumnTypeNames: [decimal(18,9), bigint] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -1105,22 +1105,22 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(18,9), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: UDFToByte(t) (type: tinyint) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [2] - selectExpressions: CastDecimalToLong(col 0:decimal(18,9)) -> 2:tinyint + projectedOutputColumnNums: [3] + selectExpressions: CastDecimalToLong(col 2:decimal(18,9))(children: ConvertDecimal64ToDecimal(col 0:decimal(18,9)/DECIMAL_64) -> 2:decimal(18,9)) -> 3:tinyint Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: tinyint) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2] + keyColumnNums: [3] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [] @@ -1130,8 +1130,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1139,9 +1139,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: t:decimal(18,9) + dataColumns: t:decimal(18,9)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [bigint] + scratchColumnTypeNames: [decimal(18,9), bigint] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -1221,22 +1221,22 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(18,9), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: UDFToShort(t) (type: smallint) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [2] - selectExpressions: CastDecimalToLong(col 0:decimal(18,9)) -> 2:smallint + projectedOutputColumnNums: [3] + selectExpressions: CastDecimalToLong(col 2:decimal(18,9))(children: ConvertDecimal64ToDecimal(col 
0:decimal(18,9)/DECIMAL_64) -> 2:decimal(18,9)) -> 3:smallint Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: smallint) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2] + keyColumnNums: [3] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [] @@ -1246,8 +1246,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1255,9 +1255,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: t:decimal(18,9) + dataColumns: t:decimal(18,9)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [bigint] + scratchColumnTypeNames: [decimal(18,9), bigint] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -1337,22 +1337,22 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(18,9), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: UDFToInteger(t) (type: int) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [2] - selectExpressions: CastDecimalToLong(col 0:decimal(18,9)) -> 2:int + projectedOutputColumnNums: [3] + selectExpressions: CastDecimalToLong(col 2:decimal(18,9))(children: ConvertDecimal64ToDecimal(col 0:decimal(18,9)/DECIMAL_64) -> 2:decimal(18,9)) -> 3:int Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2] + keyColumnNums: [3] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [] @@ -1362,8 +1362,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1371,9 +1371,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: t:decimal(18,9) + dataColumns: t:decimal(18,9)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [bigint] + scratchColumnTypeNames: [decimal(18,9), bigint] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -1453,22 +1453,22 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - 
vectorizationSchemaColumns: [0:t:decimal(18,9), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: UDFToLong(t) (type: bigint) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [2] - selectExpressions: CastDecimalToLong(col 0:decimal(18,9)) -> 2:bigint + projectedOutputColumnNums: [3] + selectExpressions: CastDecimalToLong(col 2:decimal(18,9))(children: ConvertDecimal64ToDecimal(col 0:decimal(18,9)/DECIMAL_64) -> 2:decimal(18,9)) -> 3:bigint Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: bigint) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2] + keyColumnNums: [3] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [] @@ -1478,8 +1478,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1487,9 +1487,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: t:decimal(18,9) + dataColumns: t:decimal(18,9)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [bigint] + scratchColumnTypeNames: [decimal(18,9), bigint] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -1569,22 +1569,22 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(18,9), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: UDFToFloat(t) (type: float) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [2] - selectExpressions: CastDecimalToFloat(col 0:decimal(18,9)) -> 2:float + projectedOutputColumnNums: [3] + selectExpressions: CastDecimalToFloat(col 2:decimal(18,9))(children: ConvertDecimal64ToDecimal(col 0:decimal(18,9)/DECIMAL_64) -> 2:decimal(18,9)) -> 3:float Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: float) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2] + keyColumnNums: [3] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [] @@ -1594,8 +1594,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true 
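The other change repeated across every file, inputFormatFeatureSupport and featureSupportInUse flipping from [] to [DECIMAL_64], plus the dropped "vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]" lines, is the planner-visible effect of LlapInputFormat now advertising the feature: featureSupportInUse is effectively what the input format advertises intersected with what the session enables (governed by hive.vectorized.input.format.supports.enabled). A hedged sketch of that intersection; the helper is illustrative, the Support enum is Hive's:

import java.util.EnumSet;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport.Support;

public class FeatureSupportSketch {
  // featureSupportInUse = inputFormatFeatureSupport intersected with the
  // session-enabled features; an empty result sends decimals down the
  // non-DECIMAL_64 code paths.
  static EnumSet<Support> inUse(EnumSet<Support> advertised, EnumSet<Support> enabled) {
    EnumSet<Support> result = EnumSet.copyOf(advertised);
    result.retainAll(enabled);
    return result;
  }
}
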
usesVectorUDFAdaptor: false @@ -1603,9 +1603,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: t:decimal(18,9) + dataColumns: t:decimal(18,9)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [double] + scratchColumnTypeNames: [decimal(18,9), double] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -1685,22 +1685,22 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(18,9), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: UDFToDouble(t) (type: double) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [2] - selectExpressions: CastDecimalToDouble(col 0:decimal(18,9)) -> 2:double + projectedOutputColumnNums: [3] + selectExpressions: CastDecimalToDouble(col 2:decimal(18,9))(children: ConvertDecimal64ToDecimal(col 0:decimal(18,9)/DECIMAL_64) -> 2:decimal(18,9)) -> 3:double Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: double) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2] + keyColumnNums: [3] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [] @@ -1710,8 +1710,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1719,9 +1719,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: t:decimal(18,9) + dataColumns: t:decimal(18,9)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [double] + scratchColumnTypeNames: [decimal(18,9), double] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -1801,22 +1801,22 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(18,9), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: UDFToString(t) (type: string) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [2] - selectExpressions: CastDecimalToString(col 0:decimal(18,9)) -> 2:string + projectedOutputColumnNums: [3] + selectExpressions: CastDecimalToString(col 2:decimal(18,9))(children: ConvertDecimal64ToDecimal(col 0:decimal(18,9)/DECIMAL_64) -> 2:decimal(18,9)) -> 3:string Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2] + keyColumnNums: [3] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, 
hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [] @@ -1826,8 +1826,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1835,9 +1835,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: t:decimal(18,9) + dataColumns: t:decimal(18,9)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [string] + scratchColumnTypeNames: [decimal(18,9), string] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -1914,7 +1914,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(18,9), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: 3.14 (type: decimal(4,2)) outputColumnNames: _col0 @@ -1939,8 +1939,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1948,7 +1948,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [] - dataColumns: t:decimal(18,9) + dataColumns: t:decimal(18,9)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [decimal(4,2)] @@ -1993,7 +1993,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(18,9), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: 3.14 (type: decimal(4,2)) outputColumnNames: _col0 @@ -2018,8 +2018,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2027,7 +2027,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [] - dataColumns: t:decimal(18,9) + dataColumns: t:decimal(18,9)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [decimal(4,2)] @@ -2072,7 +2072,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(18,9), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: 1355944339.1234567 (type: decimal(30,8)) outputColumnNames: _col0 @@ -2097,8 +2097,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: 
[DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2106,7 +2106,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [] - dataColumns: t:decimal(18,9) + dataColumns: t:decimal(18,9)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [decimal(30,8)] @@ -2151,7 +2151,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(18,9), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: 1 (type: decimal(10,0)) outputColumnNames: _col0 @@ -2176,8 +2176,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2185,7 +2185,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [] - dataColumns: t:decimal(18,9) + dataColumns: t:decimal(18,9)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [decimal(10,0)] @@ -2221,7 +2221,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(18,9), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: 1 (type: decimal(10,0)) outputColumnNames: _col0 @@ -2246,8 +2246,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2255,7 +2255,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [] - dataColumns: t:decimal(18,9) + dataColumns: t:decimal(18,9)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [decimal(10,0)] @@ -2300,7 +2300,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(18,9), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: 3 (type: decimal(10,0)) outputColumnNames: _col0 @@ -2325,8 +2325,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2334,7 +2334,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [] - dataColumns: t:decimal(18,9) + dataColumns: t:decimal(18,9)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [decimal(10,0)] @@ -2379,7 +2379,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - 
vectorizationSchemaColumns: [0:t:decimal(18,9), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: 3 (type: decimal(10,0)) outputColumnNames: _col0 @@ -2404,8 +2404,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2413,7 +2413,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [] - dataColumns: t:decimal(18,9) + dataColumns: t:decimal(18,9)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [decimal(10,0)] @@ -2458,7 +2458,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(18,9), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: 3 (type: decimal(10,0)) outputColumnNames: _col0 @@ -2483,8 +2483,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2492,7 +2492,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [] - dataColumns: t:decimal(18,9) + dataColumns: t:decimal(18,9)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [decimal(10,0)] @@ -2537,7 +2537,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(18,9), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: 3 (type: decimal(10,0)) outputColumnNames: _col0 @@ -2562,8 +2562,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2571,7 +2571,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [] - dataColumns: t:decimal(18,9) + dataColumns: t:decimal(18,9)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [decimal(10,0)] @@ -2616,7 +2616,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(18,9), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: 1 (type: decimal(20,19)) outputColumnNames: _col0 @@ -2641,8 +2641,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2650,7 +2650,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [] - dataColumns: t:decimal(18,9) + dataColumns: t:decimal(18,9)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [decimal(20,19)] @@ -2695,7 +2695,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(18,9), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: 0.99999999999999999999 (type: decimal(20,20)) outputColumnNames: _col0 @@ -2720,8 +2720,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2729,7 +2729,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [] - dataColumns: t:decimal(18,9) + dataColumns: t:decimal(18,9)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [decimal(20,20)] diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_5.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_5.q.out index 0bfd12e..5bea214 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_5.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_5.q.out @@ -129,6 +129,44 @@ NULL 124.00000 125.20000 200.00000 +PREHOOK: query: explain SELECT cast(key as decimal) FROM DECIMAL_5 +PREHOOK: type: QUERY +POSTHOOK: query: explain SELECT cast(key as decimal) FROM DECIMAL_5 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_5 + Statistics: Num rows: 38 Data size: 4032 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: CAST( key AS decimal(10,0)) (type: decimal(10,0)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4032 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4032 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: all inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + PREHOOK: query: SELECT cast(key as decimal) FROM DECIMAL_5 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_5 @@ -137,42 +175,42 @@ POSTHOOK: query: SELECT cast(key as decimal) FROM DECIMAL_5 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_5 #### A masked pattern was here #### --4400 +-440000000 NULL 0 0 -100 -10 -1 -0 -0 -200 -20 -2 -0 -0 +10000000 +1000000 +100000 +10000 +1000 +20000000 +2000000 +200000 0 -0 -0 -0 -0 -0 -0 -1 -2 -3 --1 --1 --1 -1 -1 -124 -125 --1255 -3 -3 -3 -1 +20000 +2000 +30000 +33000 +33300 +-30000 +-33000 +-33300 +100000 +200000 +314000 +-112000 +-112000 +-112200 +112000 +112200 +12400000 +12520000 +-125549000 +314000 +314000 
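A pattern worth calling out in the vector_decimal_5.q.out rows for cast(key as decimal): each new golden value appears to be the source key times 10^5 (its DECIMAL_64 unscaled form at the column's scale of 5), where the old golden value was the key rounded to decimal(10,0). A quick check against three keys inferred from the data (the sample values are my reconstruction, not part of the patch):

```java
import java.math.BigDecimal;
import java.math.RoundingMode;

// Old golden value = key rounded to decimal(10,0); new golden value = key * 10^5.
public class Decimal5Check {
  public static void main(String[] args) {
    for (String k : new String[] {"-4400", "0.1", "3.14"}) {
      BigDecimal key = new BigDecimal(k);
      System.out.println(key.setScale(0, RoundingMode.HALF_UP)  // old: -4400, 0, 3
          + " -> " + key.movePointRight(5).longValueExact());   // new: -440000000, 10000, 314000
    }
  }
}
```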
+314000 +100000 NULL NULL PREHOOK: query: SELECT cast(key as decimal(6,3)) FROM DECIMAL_5 @@ -187,38 +225,38 @@ NULL NULL 0.000 0.000 +NULL +NULL 100.000 10.000 1.000 -0.100 -0.010 +NULL +NULL 200.000 -20.000 -2.000 0.000 -0.200 -0.020 -0.300 -0.330 -0.333 --0.300 --0.330 --0.333 -1.000 +20.000 2.000 -3.140 --1.120 --1.120 --1.122 -1.120 -1.122 -124.000 -125.200 -NULL -3.140 -3.140 -3.140 -1.000 +30.000 +33.000 +33.300 +-30.000 +-33.000 +-33.300 +100.000 +200.000 +314.000 +-112.000 +-112.000 +-112.200 +112.000 +112.200 +NULL +NULL +NULL +314.000 +314.000 +314.000 +100.000 NULL NULL PREHOOK: query: DROP TABLE DECIMAL_5_txt diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_6.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_6.q.out index 800a4ae..705bf8b 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_6.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_6.q.out @@ -135,7 +135,7 @@ STAGE PLANS: Statistics: Num rows: 27 Data size: 2684 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:decimal(10,5), 1:value:int, 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:key:decimal(10,5)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] Select Operator expressions: key (type: decimal(10,5)), value (type: int) outputColumnNames: _col0, _col1 @@ -159,8 +159,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -168,7 +168,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: key:decimal(10,5), value:int + dataColumns: key:decimal(10,5)/DECIMAL_64, value:int partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -276,7 +276,7 @@ STAGE PLANS: Statistics: Num rows: 27 Data size: 3132 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:decimal(17,4), 1:value:int, 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:key:decimal(17,4)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] Select Operator expressions: key (type: decimal(17,4)), value (type: int) outputColumnNames: _col0, _col1 @@ -300,8 +300,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -309,7 +309,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: key:decimal(17,4), value:int + dataColumns: key:decimal(17,4)/DECIMAL_64, value:int partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -427,7 +427,7 @@ STAGE PLANS: Statistics: Num rows: 27 Data size: 2576 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:decimal(10,5), 1:value:int, 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:key:decimal(10,5)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] Select Operator expressions: CAST( key AS decimal(18,5)) (type: decimal(18,5)) outputColumnNames: _col0 @@ -435,7 +435,7 @@ 
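In the vector_decimal_6.q.out hunk just below, CastDecimalToDecimal(col 0:decimal(10,5)) is replaced by ConvertDecimal64ToDecimal: expressions with no Decimal64 implementation first widen the scaled longs into a scratch DecimalColumnVector, which is also why scratchColumnTypeNames gains extra decimal(p,s) entries across these files. A simplified analogue of that conversion (Hive's version fills HiveDecimalWritable slots; BigDecimal stands in here, and the method name is mine):

```java
import java.math.BigDecimal;

// Widen a DECIMAL_64 column (scaled longs) into full decimal values.
// No arithmetic happens: BigDecimal.valueOf(unscaled, scale) simply re-tags
// each long with its scale, mirroring what ConvertDecimal64ToDecimal does.
static BigDecimal[] convertDecimal64ToDecimal(long[] decimal64, int scale, int n) {
  BigDecimal[] scratch = new BigDecimal[n];
  for (int i = 0; i < n; i++) {
    scratch[i] = BigDecimal.valueOf(decimal64[i], scale);
  }
  return scratch;
}
```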
STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [3] - selectExpressions: CastDecimalToDecimal(col 0:decimal(10,5)) -> 3:decimal(18,5) + selectExpressions: ConvertDecimal64ToDecimal(col 0:decimal(18,5)/DECIMAL_64) -> 3:decimal(18,5) Statistics: Num rows: 27 Data size: 2576 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: decimal(18,5)) @@ -452,8 +452,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -461,7 +461,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0] - dataColumns: key:decimal(10,5), value:int + dataColumns: key:decimal(10,5)/DECIMAL_64, value:int partitionColumnCount: 0 scratchColumnTypeNames: [decimal(18,5)] Map 4 @@ -471,7 +471,7 @@ STAGE PLANS: Statistics: Num rows: 27 Data size: 3024 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:decimal(17,4), 1:value:int, 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:key:decimal(17,4)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] Select Operator expressions: CAST( key AS decimal(18,5)) (type: decimal(18,5)) outputColumnNames: _col0 @@ -479,7 +479,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [3] - selectExpressions: CastDecimalToDecimal(col 0:decimal(17,4)) -> 3:decimal(18,5) + selectExpressions: ConvertDecimal64ToDecimal(col 0:decimal(18,5)/DECIMAL_64) -> 3:decimal(18,5) Statistics: Num rows: 27 Data size: 3024 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: decimal(18,5)) @@ -496,8 +496,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -505,7 +505,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0] - dataColumns: key:decimal(17,4), value:int + dataColumns: key:decimal(17,4)/DECIMAL_64, value:int partitionColumnCount: 0 scratchColumnTypeNames: [decimal(18,5)] Reducer 3 @@ -575,54 +575,54 @@ NULL NULL NULL NULL --1234567890.12350 +-123456789.01235 -4400.00000 --4400.00000 --1255.49000 -1255.49000 --1.12200 +-440.00000 +-125.54900 -1.12200 -1.12000 --1.12000 --0.33300 -0.33300 -0.30000 --0.30000 +-0.11220 +-0.11200 +-0.03330 +-0.03000 0.00000 0.00000 0.00000 0.00000 -0.33300 +0.03330 +0.10000 +0.10000 +0.11200 +0.11220 +0.20000 +0.31400 +0.31400 +0.31400 0.33300 1.00000 1.00000 1.00000 -1.00000 -1.12000 +1.07343 1.12000 1.12200 -1.12200 -2.00000 2.00000 3.14000 3.14000 3.14000 -3.14000 -3.14000 -3.14000 -10.00000 10.00000 -10.73430 10.73433 +12.40000 +12.52000 124.00000 -124.00000 -125.20000 125.20000 +2323.22344 23232.23435 -23232.23440 -2389432.23750 -2389432.23750 -1234567890.12350 +238943.22375 +238943.22375 +123456789.01235 PREHOOK: query: EXPLAIN VECTORIZATION DETAIL CREATE TABLE DECIMAL_6_3 STORED AS ORC AS SELECT key + 5.5 AS k, value * 11 AS v from DECIMAL_6_1 ORDER BY v PREHOOK: type: 
CREATETABLE_AS_SELECT @@ -655,7 +655,7 @@ STAGE PLANS: Statistics: Num rows: 27 Data size: 2684 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:decimal(10,5), 1:value:int, 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:key:decimal(10,5)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] Select Operator expressions: (key + 5.5) (type: decimal(11,5)), (value * 11) (type: int) outputColumnNames: _col0, _col1 @@ -663,7 +663,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [3, 4] - selectExpressions: DecimalColAddDecimalScalar(col 0:decimal(10,5), val 5.5) -> 3:decimal(11,5), LongColMultiplyLongScalar(col 1:int, val 11) -> 4:int + selectExpressions: Decimal64ColAddDecimal64Scalar(col 0:decimal(10,5)/DECIMAL_64, decimal64Val 550000, decimalVal 5.5) -> 3:decimal(11,5)/DECIMAL_64, LongColMultiplyLongScalar(col 1:int, val 11) -> 4:int Statistics: Num rows: 27 Data size: 3132 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col1 (type: int) @@ -681,8 +681,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -690,9 +690,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: key:decimal(10,5), value:int + dataColumns: key:decimal(10,5)/DECIMAL_64, value:int partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(11,5), bigint] + scratchColumnTypeNames: [decimal(11,5)/DECIMAL_64, bigint] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out index 6cd1e8d..30a6770 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out @@ -111,8 +111,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -291,8 +291,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -465,7 +465,7 @@ STAGE PLANS: Statistics: Num rows: 12289 Data size: 2662128 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:cdouble:double, 1:cdecimal1:decimal(11,5), 2:cdecimal2:decimal(16,0), 3:cint:int, 4:ROW__ID:struct] + vectorizationSchemaColumns: [0:cdouble:double, 1:cdecimal1:decimal(11,5)/DECIMAL_64, 2:cdecimal2:decimal(16,0)/DECIMAL_64, 3:cint:int, 4:ROW__ID:struct] Select Operator expressions: cdecimal1 (type: decimal(11,5)), cdecimal2 (type: decimal(16,0)), cint (type: int) 
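In the vector_decimal_aggregate.q.out hunks that follow, min and max switch to VectorUDAFMinDecimal64/VectorUDAFMaxDecimal64 while sum becomes VectorUDAFSumDecimal64ToDecimal. That split makes sense because multiplying by 10^scale preserves ordering, so min/max reduce to long comparisons, whereas a running sum of longs could overflow and therefore widens into a decimal accumulator (decimal(11,5) inputs produce the plan's decimal(21,5) result). A sketch under those assumptions (method names mine, not Hive's):

```java
import java.math.BigDecimal;

// min/max over DECIMAL_64: plain long comparisons, since the scaled encoding
// is order-preserving for a fixed scale.
static long maxDecimal64(long[] v, int n) {
  long max = Long.MIN_VALUE;
  for (int i = 0; i < n; i++) max = Math.max(max, v[i]);
  return max;
}

// sum over DECIMAL_64: accumulate in arbitrary precision so that decimal(11,5)
// inputs can yield the declared decimal(21,5) result without long overflow.
static BigDecimal sumDecimal64ToDecimal(long[] v, int scale, int n) {
  BigDecimal sum = BigDecimal.ZERO;
  for (int i = 0; i < n; i++) sum = sum.add(BigDecimal.valueOf(v[i], scale));
  return sum;
}
```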
outputColumnNames: cdecimal1, cdecimal2, cint @@ -477,7 +477,7 @@ STAGE PLANS: Group By Operator aggregations: count(cdecimal1), max(cdecimal1), min(cdecimal1), sum(cdecimal1), count(cdecimal2), max(cdecimal2), min(cdecimal2), sum(cdecimal2), count() Group By Vectorization: - aggregators: VectorUDAFCount(col 1:decimal(11,5)) -> bigint, VectorUDAFMaxDecimal(col 1:decimal(11,5)) -> decimal(11,5), VectorUDAFMinDecimal(col 1:decimal(11,5)) -> decimal(11,5), VectorUDAFSumDecimal(col 1:decimal(11,5)) -> decimal(21,5), VectorUDAFCount(col 2:decimal(16,0)) -> bigint, VectorUDAFMaxDecimal(col 2:decimal(16,0)) -> decimal(16,0), VectorUDAFMinDecimal(col 2:decimal(16,0)) -> decimal(16,0), VectorUDAFSumDecimal(col 2:decimal(16,0)) -> decimal(26,0), VectorUDAFCountStar(*) -> bigint + aggregators: VectorUDAFCount(col 1:decimal(11,5)/DECIMAL_64) -> bigint, VectorUDAFMaxDecimal64(col 1:decimal(11,5)/DECIMAL_64) -> decimal(11,5)/DECIMAL_64, VectorUDAFMinDecimal64(col 1:decimal(11,5)/DECIMAL_64) -> decimal(11,5)/DECIMAL_64, VectorUDAFSumDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> decimal(21,5), VectorUDAFCount(col 2:decimal(16,0)/DECIMAL_64) -> bigint, VectorUDAFMaxDecimal64(col 2:decimal(16,0)/DECIMAL_64) -> decimal(16,0)/DECIMAL_64, VectorUDAFMinDecimal64(col 2:decimal(16,0)/DECIMAL_64) -> decimal(16,0)/DECIMAL_64, VectorUDAFSumDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> decimal(26,0), VectorUDAFCountStar(*) -> bigint className: VectorGroupByOperator groupByMode: HASH keyExpressions: col 3:int @@ -506,8 +506,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -515,7 +514,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 4 includeColumns: [1, 2, 3] - dataColumns: cdouble:double, cdecimal1:decimal(11,5), cdecimal2:decimal(16,0), cint:int + dataColumns: cdouble:double, cdecimal1:decimal(11,5)/DECIMAL_64, cdecimal2:decimal(16,0)/DECIMAL_64, cint:int partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -664,20 +663,20 @@ STAGE PLANS: Statistics: Num rows: 12289 Data size: 2662128 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:cdouble:double, 1:cdecimal1:decimal(11,5), 2:cdecimal2:decimal(16,0), 3:cint:int, 4:ROW__ID:struct] + vectorizationSchemaColumns: [0:cdouble:double, 1:cdecimal1:decimal(11,5)/DECIMAL_64, 2:cdecimal2:decimal(16,0)/DECIMAL_64, 3:cint:int, 4:ROW__ID:struct] Select Operator expressions: cint (type: int), cdecimal1 (type: decimal(11,5)), cdecimal2 (type: decimal(16,0)), UDFToDouble(cdecimal1) (type: double), (UDFToDouble(cdecimal1) * UDFToDouble(cdecimal1)) (type: double), UDFToDouble(cdecimal2) (type: double), (UDFToDouble(cdecimal2) * UDFToDouble(cdecimal2)) (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [3, 1, 2, 5, 8, 6, 10] - selectExpressions: CastDecimalToDouble(col 1:decimal(11,5)) -> 5:double, DoubleColMultiplyDoubleColumn(col 6:double, col 7:double)(children: CastDecimalToDouble(col 1:decimal(11,5)) -> 6:double, CastDecimalToDouble(col 1:decimal(11,5)) -> 7:double) -> 8:double, CastDecimalToDouble(col 2:decimal(16,0)) 
-> 6:double, DoubleColMultiplyDoubleColumn(col 7:double, col 9:double)(children: CastDecimalToDouble(col 2:decimal(16,0)) -> 7:double, CastDecimalToDouble(col 2:decimal(16,0)) -> 9:double) -> 10:double + projectedOutputColumnNums: [3, 1, 2, 6, 9, 7, 12] + selectExpressions: CastDecimalToDouble(col 5:decimal(11,5))(children: ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 5:decimal(11,5)) -> 6:double, DoubleColMultiplyDoubleColumn(col 7:double, col 8:double)(children: CastDecimalToDouble(col 5:decimal(11,5))(children: ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 5:decimal(11,5)) -> 7:double, CastDecimalToDouble(col 5:decimal(11,5))(children: ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 5:decimal(11,5)) -> 8:double) -> 9:double, CastDecimalToDouble(col 10:decimal(16,0))(children: ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 10:decimal(16,0)) -> 7:double, DoubleColMultiplyDoubleColumn(col 8:double, col 11:double)(children: CastDecimalToDouble(col 10:decimal(16,0))(children: ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 10:decimal(16,0)) -> 8:double, CastDecimalToDouble(col 10:decimal(16,0))(children: ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 10:decimal(16,0)) -> 11:double) -> 12:double Statistics: Num rows: 12289 Data size: 2662128 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(_col1), max(_col1), min(_col1), sum(_col1), sum(_col4), sum(_col3), count(_col2), max(_col2), min(_col2), sum(_col2), sum(_col6), sum(_col5), count() Group By Vectorization: - aggregators: VectorUDAFCount(col 1:decimal(11,5)) -> bigint, VectorUDAFMaxDecimal(col 1:decimal(11,5)) -> decimal(11,5), VectorUDAFMinDecimal(col 1:decimal(11,5)) -> decimal(11,5), VectorUDAFSumDecimal(col 1:decimal(11,5)) -> decimal(21,5), VectorUDAFSumDouble(col 8:double) -> double, VectorUDAFSumDouble(col 5:double) -> double, VectorUDAFCount(col 2:decimal(16,0)) -> bigint, VectorUDAFMaxDecimal(col 2:decimal(16,0)) -> decimal(16,0), VectorUDAFMinDecimal(col 2:decimal(16,0)) -> decimal(16,0), VectorUDAFSumDecimal(col 2:decimal(16,0)) -> decimal(26,0), VectorUDAFSumDouble(col 10:double) -> double, VectorUDAFSumDouble(col 6:double) -> double, VectorUDAFCountStar(*) -> bigint + aggregators: VectorUDAFCount(col 1:decimal(11,5)/DECIMAL_64) -> bigint, VectorUDAFMaxDecimal64(col 1:decimal(11,5)/DECIMAL_64) -> decimal(11,5)/DECIMAL_64, VectorUDAFMinDecimal64(col 1:decimal(11,5)/DECIMAL_64) -> decimal(11,5)/DECIMAL_64, VectorUDAFSumDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> decimal(21,5), VectorUDAFSumDouble(col 9:double) -> double, VectorUDAFSumDouble(col 6:double) -> double, VectorUDAFCount(col 2:decimal(16,0)/DECIMAL_64) -> bigint, VectorUDAFMaxDecimal64(col 2:decimal(16,0)/DECIMAL_64) -> decimal(16,0)/DECIMAL_64, VectorUDAFMinDecimal64(col 2:decimal(16,0)/DECIMAL_64) -> decimal(16,0)/DECIMAL_64, VectorUDAFSumDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> decimal(26,0), VectorUDAFSumDouble(col 12:double) -> double, VectorUDAFSumDouble(col 7:double) -> double, VectorUDAFCountStar(*) -> bigint className: VectorGroupByOperator groupByMode: HASH keyExpressions: col 3:int @@ -706,8 +705,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: 
org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -715,9 +713,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 4 includeColumns: [1, 2, 3] - dataColumns: cdouble:double, cdecimal1:decimal(11,5), cdecimal2:decimal(16,0), cint:int + dataColumns: cdouble:double, cdecimal1:decimal(11,5)/DECIMAL_64, cdecimal2:decimal(16,0)/DECIMAL_64, cint:int partitionColumnCount: 0 - scratchColumnTypeNames: [double, double, double, double, double, double] + scratchColumnTypeNames: [decimal(11,5), double, double, double, double, decimal(16,0), double, double] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_cast.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_cast.q.out index 67630b4..2414907 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_cast.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_cast.q.out @@ -60,8 +60,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -184,8 +184,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out index c01637e..024ce07 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out @@ -94,8 +94,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -234,12 +234,12 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 2708600 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:cdouble:double, 1:cdecimal1:decimal(10,3), 2:cdecimal2:decimal(7,2), 3:ROW__ID:struct] + vectorizationSchemaColumns: [0:cdouble:double, 1:cdecimal1:decimal(10,3)/DECIMAL_64, 2:cdecimal2:decimal(7,2)/DECIMAL_64, 3:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterExprAndExpr(children: FilterDecimalColGreaterDecimalScalar(col 1:decimal(10,3), val 0), FilterDecimalColLessDecimalScalar(col 1:decimal(10,3), val 12345.5678), FilterDecimalColNotEqualDecimalScalar(col 2:decimal(7,2), val 0), FilterDecimalColGreaterDecimalScalar(col 2:decimal(7,2), val 1000), SelectColumnIsNotNull(col 0:double)) + predicateExpression: FilterExprAndExpr(children: FilterDecimal64ColGreaterDecimal64Scalar(col 1:decimal(10,3)/DECIMAL_64, val 0), FilterDecimalColLessDecimalScalar(col 
4:decimal(10,3), val 12345.5678)(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 4:decimal(10,3)), FilterDecimal64ColNotEqualDecimal64Scalar(col 2:decimal(7,2)/DECIMAL_64, val 0), FilterDecimal64ColGreaterDecimal64Scalar(col 2:decimal(7,2)/DECIMAL_64, val 100000), SelectColumnIsNotNull(col 0:double)) predicate: ((cdecimal1 < 12345.5678) and (cdecimal1 > 0) and (cdecimal2 <> 0) and (cdecimal2 > 1000) and cdouble is not null) (type: boolean) Statistics: Num rows: 455 Data size: 100294 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -248,15 +248,15 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [4, 6, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20] - selectExpressions: DecimalColAddDecimalColumn(col 1:decimal(10,3), col 2:decimal(7,2)) -> 4:decimal(11,3), DecimalColSubtractDecimalColumn(col 1:decimal(10,3), col 5:decimal(9,2))(children: DecimalScalarMultiplyDecimalColumn(val 2, col 2:decimal(7,2)) -> 5:decimal(9,2)) -> 6:decimal(11,3), DecimalColDivideDecimalColumn(col 7:decimal(11,3), col 2:decimal(7,2))(children: DecimalColAddDecimalScalar(col 1:decimal(10,3), val 2.34) -> 7:decimal(11,3)) -> 8:decimal(21,11), DecimalColMultiplyDecimalColumn(col 1:decimal(10,3), col 9:decimal(12,6))(children: DecimalColDivideDecimalScalar(col 2:decimal(7,2), val 3.4) -> 9:decimal(12,6)) -> 10:decimal(23,9), DecimalColModuloDecimalScalar(col 1:decimal(10,3), val 10) -> 11:decimal(5,3), CastDecimalToLong(col 1:decimal(10,3)) -> 12:int, CastDecimalToLong(col 2:decimal(7,2)) -> 13:smallint, CastDecimalToLong(col 2:decimal(7,2)) -> 14:tinyint, CastDecimalToLong(col 1:decimal(10,3)) -> 15:bigint, CastDecimalToBoolean(col 1:decimal(10,3)) -> 16:boolean, CastDecimalToDouble(col 2:decimal(7,2)) -> 17:double, CastDecimalToFloat(col 1:decimal(10,3)) -> 18:float, CastDecimalToString(col 2:decimal(7,2)) -> 19:string, CastDecimalToTimestamp(col 1:decimal(10,3)) -> 20:timestamp + projectedOutputColumnNums: [6, 8, 10, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22] + selectExpressions: DecimalColAddDecimalColumn(col 4:decimal(10,3), col 5:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 4:decimal(10,3), ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 5:decimal(7,2)) -> 6:decimal(11,3), DecimalColSubtractDecimalColumn(col 4:decimal(10,3), col 7:decimal(9,2))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 4:decimal(10,3), DecimalScalarMultiplyDecimalColumn(val 2, col 5:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 5:decimal(7,2)) -> 7:decimal(9,2)) -> 8:decimal(11,3), DecimalColDivideDecimalColumn(col 23:decimal(11,3), col 5:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 9:decimal(11,3)/DECIMAL_64)(children: Decimal64ColAddDecimal64Scalar(col 1:decimal(10,3)/DECIMAL_64, decimal64Val 2340, decimalVal 2.34) -> 9:decimal(11,3)/DECIMAL_64) -> 23:decimal(11,3), ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 5:decimal(7,2)) -> 10:decimal(21,11), DecimalColMultiplyDecimalColumn(col 4:decimal(10,3), col 11:decimal(12,6))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 4:decimal(10,3), DecimalColDivideDecimalScalar(col 5:decimal(7,2), val 3.4)(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 5:decimal(7,2)) -> 11:decimal(12,6)) -> 12:decimal(23,9), DecimalColModuloDecimalScalar(col 4:decimal(10,3), val 10)(children: ConvertDecimal64ToDecimal(col 
1:decimal(10,3)/DECIMAL_64) -> 4:decimal(10,3)) -> 13:decimal(5,3), CastDecimalToLong(col 4:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 4:decimal(10,3)) -> 14:int, CastDecimalToLong(col 5:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 5:decimal(7,2)) -> 15:smallint, CastDecimalToLong(col 5:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 5:decimal(7,2)) -> 16:tinyint, CastDecimalToLong(col 4:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 4:decimal(10,3)) -> 17:bigint, CastDecimalToBoolean(col 4:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 4:decimal(10,3)) -> 18:boolean, CastDecimalToDouble(col 5:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 5:decimal(7,2)) -> 19:double, CastDecimalToFloat(col 4:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 4:decimal(10,3)) -> 20:float, CastDecimalToString(col 5:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 5:decimal(7,2)) -> 21:string, CastDecimalToTimestamp(col 4:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 4:decimal(10,3)) -> 22:timestamp Statistics: Num rows: 455 Data size: 100294 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: decimal(11,3)), _col1 (type: decimal(11,3)), _col2 (type: decimal(21,11)), _col3 (type: decimal(23,9)), _col4 (type: decimal(5,3)), _col5 (type: int), _col6 (type: smallint), _col7 (type: tinyint), _col8 (type: bigint), _col9 (type: boolean), _col10 (type: double), _col11 (type: float), _col12 (type: string), _col13 (type: timestamp) sort order: ++++++++++++++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [4, 6, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20] + keyColumnNums: [6, 8, 10, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [] @@ -267,8 +267,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -276,9 +276,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 3 includeColumns: [0, 1, 2] - dataColumns: cdouble:double, cdecimal1:decimal(10,3), cdecimal2:decimal(7,2) + dataColumns: cdouble:double, cdecimal1:decimal(10,3)/DECIMAL_64, cdecimal2:decimal(7,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(11,3), decimal(9,2), decimal(11,3), decimal(11,3), decimal(21,11), decimal(12,6), decimal(23,9), decimal(5,3), bigint, bigint, bigint, bigint, bigint, double, double, string, timestamp] + scratchColumnTypeNames: [decimal(10,3), decimal(7,2), decimal(11,3), decimal(9,2), decimal(11,3), decimal(11,3)/DECIMAL_64, decimal(21,11), decimal(12,6), decimal(23,9), decimal(5,3), bigint, bigint, bigint, bigint, bigint, double, double, string, timestamp, decimal(11,3)] Reducer 2 Execution mode: 
vectorized, llap Reduce Vectorization: @@ -360,4 +360,4 @@ ORDER BY c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14) q POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_test_small_n0 #### A masked pattern was here #### -774841630076 +1273824888155 diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_mapjoin.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_mapjoin.q.out index 7b56d27..61f6609 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_mapjoin.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_mapjoin.q.out @@ -152,8 +152,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -203,8 +203,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -338,6 +338,17 @@ POSTHOOK: Input: default@t2_n29 9.00 9 9.00 9 9.00 9 +PREHOOK: query: select count(*) from (select t1_n48.`dec`, t2_n29.`dec` from t1_n48 join t2_n29 on (t1_n48.`dec`=t2_n29.`dec`)) as t +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_n48 +PREHOOK: Input: default@t2_n29 +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from (select t1_n48.`dec`, t2_n29.`dec` from t1_n48 join t2_n29 on (t1_n48.`dec`=t2_n29.`dec`)) as t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_n48 +POSTHOOK: Input: default@t2_n29 +#### A masked pattern was here #### +106 PREHOOK: query: explain vectorization detail select t1_n48.`dec`, t1_n48.value_dec, t2_n29.`dec`, t2_n29.value_dec from t1_n48 join t2_n29 on (t1_n48.`dec`=t2_n29.`dec`) PREHOOK: type: QUERY @@ -416,8 +427,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -468,8 +479,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -603,6 +614,17 @@ POSTHOOK: Input: default@t2_n29 9.00 48.96 9 5 9.00 48.96 9 7 9.00 48.96 9 7 +PREHOOK: query: select count(*) from (select t1_n48.`dec`, t1_n48.value_dec, t2_n29.`dec`, t2_n29.value_dec from t1_n48 join t2_n29 on (t1_n48.`dec`=t2_n29.`dec`)) as t +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_n48 +PREHOOK: Input: default@t2_n29 +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from (select t1_n48.`dec`, t1_n48.value_dec, t2_n29.`dec`, t2_n29.value_dec from t1_n48 join t2_n29 on (t1_n48.`dec`=t2_n29.`dec`)) as t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_n48 
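The map-join plans further down convert the big-table key out of DECIMAL_64 (bigTableKeyExpressions: ConvertDecimal64ToDecimal(...) -> decimal(16,2)) before hashing. One reason the raw longs cannot serve directly as join keys here: the two key columns have different scales (decimal(14,2) vs decimal(14,0)), and equal decimals at different scales encode to different longs, so both sides must be normalized to the common key type decimal(16,2) first. The newly added count(*) queries then pin the join cardinality down independent of row ordering. A small demonstration, with the 62.00/62 pair taken from these golden rows (class name mine):

```java
import java.math.BigDecimal;

// Equal decimals at different scales have different DECIMAL_64 longs, so the
// keys must be normalized to a common decimal type before hashing.
public class JoinKeyScales {
  public static void main(String[] args) {
    long lhs = new BigDecimal("62.00").movePointRight(2).longValueExact(); // 6200 (scale 2)
    long rhs = new BigDecimal("62").longValueExact();                      // 62   (scale 0)
    System.out.println(lhs == rhs);                                        // false
    // Re-tagging each long with its scale restores equality:
    System.out.println(BigDecimal.valueOf(lhs, 2)
        .compareTo(BigDecimal.valueOf(rhs, 0)) == 0);                      // true
  }
}
```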
+POSTHOOK: Input: default@t2_n29 +#### A masked pattern was here #### +106 PREHOOK: query: CREATE TABLE over1k_small(t tinyint, si smallint, i int, @@ -708,12 +730,12 @@ STAGE PLANS: Statistics: Num rows: 1049 Data size: 111776 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:dec:decimal(14,2), 1:value_dec:decimal(14,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:dec:decimal(14,2)/DECIMAL_64, 1:value_dec:decimal(14,2)/DECIMAL_64, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:decimal(14,2)) + predicateExpression: SelectColumnIsNotNull(col 3:decimal(14,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(14,2)/DECIMAL_64) -> 3:decimal(14,2)) predicate: dec is not null (type: boolean) Statistics: Num rows: 997 Data size: 106235 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -731,8 +753,8 @@ STAGE PLANS: 0 _col0 (type: decimal(16,2)) 1 _col0 (type: decimal(16,2)) Map Join Vectorization: - bigTableKeyExpressions: col 0:decimal(16,2) - bigTableValueExpressions: col 0:decimal(16,2) + bigTableKeyExpressions: ConvertDecimal64ToDecimal(col 0:decimal(16,2)/DECIMAL_64) -> 4:decimal(16,2) + bigTableValueExpressions: ConvertDecimal64ToDecimal(col 0:decimal(16,2)/DECIMAL_64) -> 5:decimal(16,2) className: VectorMapJoinOperator native: false nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true @@ -758,8 +780,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -767,9 +788,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0] - dataColumns: dec:decimal(14,2), value_dec:decimal(14,2) + dataColumns: dec:decimal(14,2)/DECIMAL_64, value_dec:decimal(14,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(14,2), decimal(16,2), decimal(16,2)] Map 2 Map Operator Tree: TableScan @@ -777,12 +798,12 @@ STAGE PLANS: Statistics: Num rows: 1049 Data size: 111776 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:dec:decimal(14,0), 1:value_dec:decimal(14,0), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:dec:decimal(14,0)/DECIMAL_64, 1:value_dec:decimal(14,0)/DECIMAL_64, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:decimal(14,0)) + predicateExpression: SelectColumnIsNotNull(col 3:decimal(14,0))(children: ConvertDecimal64ToDecimal(col 0:decimal(14,0)/DECIMAL_64) -> 3:decimal(14,0)) predicate: dec is not null (type: boolean) Statistics: Num rows: 997 Data size: 106235 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -810,8 +831,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is 
enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -819,9 +839,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0] - dataColumns: dec:decimal(14,0), value_dec:decimal(14,0) + dataColumns: dec:decimal(14,0)/DECIMAL_64, value_dec:decimal(14,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(14,0)] Stage: Stage-0 Fetch Operator @@ -839,112 +859,18 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_small POSTHOOK: Input: default@t2_small #### A masked pattern was here #### -14.00 14 -14.00 14 -14.00 14 -14.00 14 -14.00 14 -14.00 14 -14.00 14 -14.00 14 -14.00 14 -17.00 17 -17.00 17 -17.00 17 -17.00 17 -17.00 17 -17.00 17 -17.00 17 -17.00 17 -17.00 17 -17.00 17 -45.00 45 -45.00 45 -45.00 45 -45.00 45 -45.00 45 -6.00 6 -6.00 6 -6.00 6 -6.00 6 -6.00 6 -6.00 6 -62.00 62 -62.00 62 -62.00 62 -62.00 62 -62.00 62 -62.00 62 -62.00 62 -62.00 62 -62.00 62 -62.00 62 -62.00 62 -62.00 62 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -70.00 70 -70.00 70 -70.00 70 -70.00 70 -70.00 70 -70.00 70 -70.00 70 -79.00 79 -79.00 79 -79.00 79 -79.00 79 -79.00 79 -79.00 79 -89.00 89 89.00 89 -89.00 89 -89.00 89 -89.00 89 -89.00 89 -89.00 89 -89.00 89 -89.00 89 -89.00 89 -89.00 89 -89.00 89 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 +PREHOOK: query: select count(*) from (select t1_small.`dec`, t2_small.`dec` from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`)) as t +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_small +PREHOOK: Input: default@t2_small +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from (select t1_small.`dec`, t2_small.`dec` from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`)) as t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_small +POSTHOOK: Input: default@t2_small +#### A masked pattern was here #### +1 PREHOOK: query: explain vectorization detail select t1_small.`dec`, t1_small.value_dec, t2_small.`dec`, t2_small.value_dec from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`) PREHOOK: type: QUERY @@ -974,12 +900,12 @@ STAGE PLANS: Statistics: Num rows: 1049 Data size: 223552 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:dec:decimal(14,2), 1:value_dec:decimal(14,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:dec:decimal(14,2)/DECIMAL_64, 1:value_dec:decimal(14,2)/DECIMAL_64, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:decimal(14,2)) + predicateExpression: SelectColumnIsNotNull(col 3:decimal(14,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(14,2)/DECIMAL_64) -> 3:decimal(14,2)) predicate: dec is not null (type: boolean) Statistics: Num rows: 997 Data size: 212470 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -997,8 +923,8 @@ STAGE PLANS: 0 _col0 (type: decimal(16,2)) 1 _col0 (type: decimal(16,2)) Map Join Vectorization: - bigTableKeyExpressions: col 0:decimal(16,2) - bigTableValueExpressions: col 0:decimal(16,2), col 1:decimal(14,2) + bigTableKeyExpressions: ConvertDecimal64ToDecimal(col 
0:decimal(16,2)/DECIMAL_64) -> 4:decimal(16,2) + bigTableValueExpressions: ConvertDecimal64ToDecimal(col 0:decimal(16,2)/DECIMAL_64) -> 5:decimal(16,2), ConvertDecimal64ToDecimal(col 1:decimal(14,2)/DECIMAL_64) -> 3:decimal(14,2) className: VectorMapJoinOperator native: false nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true @@ -1024,8 +950,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1033,9 +958,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: dec:decimal(14,2), value_dec:decimal(14,2) + dataColumns: dec:decimal(14,2)/DECIMAL_64, value_dec:decimal(14,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(14,0)] + scratchColumnTypeNames: [decimal(14,2), decimal(16,2), decimal(16,2), decimal(14,0)] Map 2 Map Operator Tree: TableScan @@ -1043,12 +968,12 @@ STAGE PLANS: Statistics: Num rows: 1049 Data size: 223552 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:dec:decimal(14,0), 1:value_dec:decimal(14,0), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:dec:decimal(14,0)/DECIMAL_64, 1:value_dec:decimal(14,0)/DECIMAL_64, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:decimal(14,0)) + predicateExpression: SelectColumnIsNotNull(col 3:decimal(14,0))(children: ConvertDecimal64ToDecimal(col 0:decimal(14,0)/DECIMAL_64) -> 3:decimal(14,0)) predicate: dec is not null (type: boolean) Statistics: Num rows: 997 Data size: 212470 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -1077,8 +1002,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1086,9 +1010,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: dec:decimal(14,0), value_dec:decimal(14,0) + dataColumns: dec:decimal(14,0)/DECIMAL_64, value_dec:decimal(14,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(14,0)] Stage: Stage-0 Fetch Operator @@ -1106,112 +1030,18 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_small POSTHOOK: Input: default@t2_small #### A masked pattern was here #### -14.00 33.66 14 10 -14.00 33.66 14 22 -14.00 33.66 14 34 -14.00 33.66 14 39 -14.00 33.66 14 42 -14.00 33.66 14 45 -14.00 33.66 14 46 -14.00 33.66 14 49 -14.00 33.66 14 5 -17.00 14.26 17 1 -17.00 14.26 17 14 -17.00 14.26 17 16 -17.00 14.26 17 19 -17.00 14.26 17 2 -17.00 14.26 17 22 -17.00 14.26 17 29 -17.00 14.26 17 3 -17.00 14.26 17 4 -17.00 14.26 17 44 -45.00 23.55 45 1 -45.00 23.55 45 2 -45.00 23.55 45 22 -45.00 23.55 45 24 -45.00 23.55 45 
42 -6.00 29.78 6 16 -6.00 29.78 6 28 -6.00 29.78 6 30 -6.00 29.78 6 34 -6.00 29.78 6 36 -6.00 29.78 6 44 -62.00 21.02 62 15 -62.00 21.02 62 15 -62.00 21.02 62 21 -62.00 21.02 62 21 -62.00 21.02 62 22 -62.00 21.02 62 25 -62.00 21.02 62 29 -62.00 21.02 62 3 -62.00 21.02 62 34 -62.00 21.02 62 47 -62.00 21.02 62 47 -62.00 21.02 62 49 -64.00 37.76 64 0 -64.00 37.76 64 10 -64.00 37.76 64 10 -64.00 37.76 64 13 -64.00 37.76 64 23 -64.00 37.76 64 25 -64.00 37.76 64 26 -64.00 37.76 64 27 -64.00 37.76 64 27 -64.00 37.76 64 30 -64.00 37.76 64 32 -64.00 37.76 64 34 -64.00 37.76 64 35 -64.00 37.76 64 38 -64.00 37.76 64 40 -64.00 37.76 64 43 -64.00 37.76 64 5 -64.00 37.76 64 50 -70.00 24.59 70 2 -70.00 24.59 70 25 -70.00 24.59 70 27 -70.00 24.59 70 28 -70.00 24.59 70 3 -70.00 24.59 70 32 -70.00 24.59 70 44 -79.00 15.12 79 1 -79.00 15.12 79 15 -79.00 15.12 79 25 -79.00 15.12 79 30 -79.00 15.12 79 35 -79.00 15.12 79 35 -89.00 15.09 89 1 89.00 15.09 89 15 -89.00 15.09 89 23 -89.00 15.09 89 27 -89.00 15.09 89 28 -89.00 15.09 89 29 -89.00 15.09 89 30 -89.00 15.09 89 32 -89.00 15.09 89 39 -89.00 15.09 89 40 -89.00 15.09 89 45 -89.00 15.09 89 7 -9.00 48.96 9 12 -9.00 48.96 9 15 -9.00 48.96 9 2 -9.00 48.96 9 2 -9.00 48.96 9 2 -9.00 48.96 9 20 -9.00 48.96 9 20 -9.00 48.96 9 21 -9.00 48.96 9 21 -9.00 48.96 9 26 -9.00 48.96 9 27 -9.00 48.96 9 34 -9.00 48.96 9 38 -9.00 48.96 9 41 -9.00 48.96 9 42 -9.00 48.96 9 45 -9.00 48.96 9 48 -9.00 48.96 9 49 -9.00 48.96 9 5 -9.00 48.96 9 7 -9.00 48.96 9 7 +PREHOOK: query: select count(*) from (select t1_small.`dec`, t1_small.value_dec, t2_small.`dec`, t2_small.value_dec from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`)) as t +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_small +PREHOOK: Input: default@t2_small +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from (select t1_small.`dec`, t1_small.value_dec, t2_small.`dec`, t2_small.value_dec from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`)) as t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_small +POSTHOOK: Input: default@t2_small +#### A masked pattern was here #### +1 PREHOOK: query: explain vectorization detail select t1_small.`dec`, t2_small.`dec` from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`) PREHOOK: type: QUERY @@ -1372,112 +1202,18 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_small POSTHOOK: Input: default@t2_small #### A masked pattern was here #### -14.00 14 -14.00 14 -14.00 14 -14.00 14 -14.00 14 -14.00 14 -14.00 14 -14.00 14 -14.00 14 -17.00 17 -17.00 17 -17.00 17 -17.00 17 -17.00 17 -17.00 17 -17.00 17 -17.00 17 -17.00 17 -17.00 17 -45.00 45 -45.00 45 -45.00 45 -45.00 45 -45.00 45 -6.00 6 -6.00 6 -6.00 6 -6.00 6 -6.00 6 -6.00 6 -62.00 62 -62.00 62 -62.00 62 -62.00 62 -62.00 62 -62.00 62 -62.00 62 -62.00 62 -62.00 62 -62.00 62 -62.00 62 -62.00 62 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -64.00 64 -70.00 70 -70.00 70 -70.00 70 -70.00 70 -70.00 70 -70.00 70 -70.00 70 -79.00 79 -79.00 79 -79.00 79 -79.00 79 -79.00 79 -79.00 79 -89.00 89 -89.00 89 -89.00 89 -89.00 89 -89.00 89 89.00 89 -89.00 89 -89.00 89 -89.00 89 -89.00 89 -89.00 89 -89.00 89 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 -9.00 9 +PREHOOK: query: select count(*) from (select t1_small.`dec`, t2_small.`dec` from t1_small join t2_small 
on (t1_small.`dec`=t2_small.`dec`)) as t +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_small +PREHOOK: Input: default@t2_small +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from (select t1_small.`dec`, t2_small.`dec` from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`)) as t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_small +POSTHOOK: Input: default@t2_small +#### A masked pattern was here #### +1 PREHOOK: query: explain vectorization detail select t1_small.`dec`, t1_small.value_dec, t2_small.`dec`, t2_small.value_dec from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`) PREHOOK: type: QUERY @@ -1639,109 +1375,15 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_small POSTHOOK: Input: default@t2_small #### A masked pattern was here #### -14.00 33.66 14 10 -14.00 33.66 14 22 -14.00 33.66 14 34 -14.00 33.66 14 39 -14.00 33.66 14 42 -14.00 33.66 14 45 -14.00 33.66 14 46 -14.00 33.66 14 49 -14.00 33.66 14 5 -17.00 14.26 17 1 -17.00 14.26 17 14 -17.00 14.26 17 16 -17.00 14.26 17 19 -17.00 14.26 17 2 -17.00 14.26 17 22 -17.00 14.26 17 29 -17.00 14.26 17 3 -17.00 14.26 17 4 -17.00 14.26 17 44 -45.00 23.55 45 1 -45.00 23.55 45 2 -45.00 23.55 45 22 -45.00 23.55 45 24 -45.00 23.55 45 42 -6.00 29.78 6 16 -6.00 29.78 6 28 -6.00 29.78 6 30 -6.00 29.78 6 34 -6.00 29.78 6 36 -6.00 29.78 6 44 -62.00 21.02 62 15 -62.00 21.02 62 15 -62.00 21.02 62 21 -62.00 21.02 62 21 -62.00 21.02 62 22 -62.00 21.02 62 25 -62.00 21.02 62 29 -62.00 21.02 62 3 -62.00 21.02 62 34 -62.00 21.02 62 47 -62.00 21.02 62 47 -62.00 21.02 62 49 -64.00 37.76 64 0 -64.00 37.76 64 10 -64.00 37.76 64 10 -64.00 37.76 64 13 -64.00 37.76 64 23 -64.00 37.76 64 25 -64.00 37.76 64 26 -64.00 37.76 64 27 -64.00 37.76 64 27 -64.00 37.76 64 30 -64.00 37.76 64 32 -64.00 37.76 64 34 -64.00 37.76 64 35 -64.00 37.76 64 38 -64.00 37.76 64 40 -64.00 37.76 64 43 -64.00 37.76 64 5 -64.00 37.76 64 50 -70.00 24.59 70 2 -70.00 24.59 70 25 -70.00 24.59 70 27 -70.00 24.59 70 28 -70.00 24.59 70 3 -70.00 24.59 70 32 -70.00 24.59 70 44 -79.00 15.12 79 1 -79.00 15.12 79 15 -79.00 15.12 79 25 -79.00 15.12 79 30 -79.00 15.12 79 35 -79.00 15.12 79 35 -89.00 15.09 89 1 89.00 15.09 89 15 -89.00 15.09 89 23 -89.00 15.09 89 27 -89.00 15.09 89 28 -89.00 15.09 89 29 -89.00 15.09 89 30 -89.00 15.09 89 32 -89.00 15.09 89 39 -89.00 15.09 89 40 -89.00 15.09 89 45 -89.00 15.09 89 7 -9.00 48.96 9 12 -9.00 48.96 9 15 -9.00 48.96 9 2 -9.00 48.96 9 2 -9.00 48.96 9 2 -9.00 48.96 9 20 -9.00 48.96 9 20 -9.00 48.96 9 21 -9.00 48.96 9 21 -9.00 48.96 9 26 -9.00 48.96 9 27 -9.00 48.96 9 34 -9.00 48.96 9 38 -9.00 48.96 9 41 -9.00 48.96 9 42 -9.00 48.96 9 45 -9.00 48.96 9 48 -9.00 48.96 9 49 -9.00 48.96 9 5 -9.00 48.96 9 7 -9.00 48.96 9 7 +PREHOOK: query: select count(*) from (select t1_small.`dec`, t1_small.value_dec, t2_small.`dec`, t2_small.value_dec from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`)) as t +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_small +PREHOOK: Input: default@t2_small +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from (select t1_small.`dec`, t1_small.value_dec, t2_small.`dec`, t2_small.value_dec from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`)) as t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_small +POSTHOOK: Input: default@t2_small +#### A masked pattern was here #### +1 diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_math_funcs.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_math_funcs.q.out index 
ec43f62..e3d5044 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_math_funcs.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_math_funcs.q.out @@ -150,8 +150,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -364,12 +364,12 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 1401000 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:cbigint:bigint, 1:cdouble:double, 2:cdecimal1:decimal(12,4), 3:cdecimal2:decimal(14,8), 4:ROW__ID:struct] + vectorizationSchemaColumns: [0:cbigint:bigint, 1:cdouble:double, 2:cdecimal1:decimal(12,4)/DECIMAL_64, 3:cdecimal2:decimal(14,8)/DECIMAL_64, 4:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterExprAndExpr(children: FilterLongColEqualLongScalar(col 5:bigint, val 0)(children: LongColModuloLongScalar(col 0:bigint, val 500) -> 5:bigint), FilterDoubleColGreaterEqualDoubleScalar(col 7:double, val -1.0)(children: FuncSinDoubleToDouble(col 6:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 6:double) -> 7:double)) + predicateExpression: FilterExprAndExpr(children: FilterLongColEqualLongScalar(col 5:bigint, val 0)(children: LongColModuloLongScalar(col 0:bigint, val 500) -> 5:bigint), FilterDoubleColGreaterEqualDoubleScalar(col 8:double, val -1.0)(children: FuncSinDoubleToDouble(col 7:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 7:double) -> 8:double)) predicate: (((cbigint % 500) = 0) and (sin(cdecimal1) >= -1.0D)) (type: boolean) Statistics: Num rows: 2048 Data size: 233500 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -378,8 +378,8 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [2, 8, 9, 10, 11, 6, 12, 13, 14, 16, 17, 7, 18, 20, 21, 22, 23, 24, 25, 26, 27, 28, 2, 29, 5, 30] - selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(12,4), decimalPlaces 2) -> 8:decimal(11,2), FuncRoundDecimalToDecimal(col 2:decimal(12,4)) -> 9:decimal(9,0), FuncFloorDecimalToDecimal(col 2:decimal(12,4)) -> 10:decimal(9,0), FuncCeilDecimalToDecimal(col 2:decimal(12,4)) -> 11:decimal(9,0), RoundWithNumDigitsDoubleToDouble(col 7, decimalPlaces 58)(children: FuncExpDoubleToDouble(col 6:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 6:double) -> 7:double) -> 6:double, FuncLnDoubleToDouble(col 7:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 7:double) -> 12:double, FuncLog10DoubleToDouble(col 7:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 7:double) -> 13:double, FuncLog2DoubleToDouble(col 7:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 7:double) -> 14:double, FuncLog2DoubleToDouble(col 7:double)(children: CastDecimalToDouble(col 15:decimal(13,4))(children: DecimalColSubtractDecimalScalar(col 2:decimal(12,4), val 15601) -> 15:decimal(13,4)) -> 7:double) -> 16:double, FuncLogWithBaseDoubleToDouble(col 7:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 7:double) -> 17:double, FuncPowerDoubleToDouble(col 
18:double)(children: FuncLog2DoubleToDouble(col 7:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 7:double) -> 18:double) -> 7:double, FuncPowerDoubleToDouble(col 19:double)(children: FuncLog2DoubleToDouble(col 18:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 18:double) -> 19:double) -> 18:double, FuncSqrtDoubleToDouble(col 19:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 19:double) -> 20:double, FuncAbsDecimalToDecimal(col 2:decimal(12,4)) -> 21:decimal(12,4), FuncSinDoubleToDouble(col 19:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 19:double) -> 22:double, FuncASinDoubleToDouble(col 19:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 19:double) -> 23:double, FuncCosDoubleToDouble(col 19:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 19:double) -> 24:double, FuncACosDoubleToDouble(col 19:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 19:double) -> 25:double, FuncATanDoubleToDouble(col 19:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 19:double) -> 26:double, FuncDegreesDoubleToDouble(col 19:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 19:double) -> 27:double, FuncRadiansDoubleToDouble(col 19:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 19:double) -> 28:double, FuncNegateDecimalToDecimal(col 2:decimal(12,4)) -> 29:decimal(12,4), FuncSignDecimalToLong(col 2:decimal(12,4)) -> 5:int, FuncCosDoubleToDouble(col 19:double)(children: DoubleColAddDoubleScalar(col 30:double, val 3.14159)(children: DoubleColUnaryMinus(col 19:double)(children: FuncSinDoubleToDouble(col 30:double)(children: FuncLnDoubleToDouble(col 19:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 19:double) -> 30:double) -> 19:double) -> 30:double) -> 19:double) -> 30:double + projectedOutputColumnNums: [2, 9, 10, 11, 12, 7, 13, 14, 15, 17, 18, 8, 19, 21, 22, 23, 24, 25, 26, 27, 28, 29, 2, 30, 5, 31] + selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 6:decimal(12,4), decimalPlaces 2)(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 9:decimal(11,2), FuncRoundDecimalToDecimal(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 10:decimal(9,0), FuncFloorDecimalToDecimal(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 11:decimal(9,0), FuncCeilDecimalToDecimal(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 12:decimal(9,0), RoundWithNumDigitsDoubleToDouble(col 8, decimalPlaces 58)(children: FuncExpDoubleToDouble(col 7:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 7:double) -> 8:double) -> 7:double, FuncLnDoubleToDouble(col 8:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 8:double) -> 13:double, FuncLog10DoubleToDouble(col 8:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 8:double) -> 14:double, FuncLog2DoubleToDouble(col 8:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 8:double) -> 15:double, FuncLog2DoubleToDouble(col 
8:double)(children: CastDecimalToDouble(col 32:decimal(13,4))(children: ConvertDecimal64ToDecimal(col 16:decimal(13,4)/DECIMAL_64)(children: Decimal64ColSubtractDecimal64Scalar(col 2:decimal(12,4)/DECIMAL_64, decimal64Val 156010000, decimalVal 15601) -> 16:decimal(13,4)/DECIMAL_64) -> 32:decimal(13,4)) -> 8:double) -> 17:double, FuncLogWithBaseDoubleToDouble(col 8:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 8:double) -> 18:double, FuncPowerDoubleToDouble(col 19:double)(children: FuncLog2DoubleToDouble(col 8:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 8:double) -> 19:double) -> 8:double, FuncPowerDoubleToDouble(col 20:double)(children: FuncLog2DoubleToDouble(col 19:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 19:double) -> 20:double) -> 19:double, FuncSqrtDoubleToDouble(col 20:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 20:double) -> 21:double, FuncAbsDecimalToDecimal(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 22:decimal(12,4), FuncSinDoubleToDouble(col 20:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 20:double) -> 23:double, FuncASinDoubleToDouble(col 20:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 20:double) -> 24:double, FuncCosDoubleToDouble(col 20:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 20:double) -> 25:double, FuncACosDoubleToDouble(col 20:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 20:double) -> 26:double, FuncATanDoubleToDouble(col 20:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 20:double) -> 27:double, FuncDegreesDoubleToDouble(col 20:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 20:double) -> 28:double, FuncRadiansDoubleToDouble(col 20:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 20:double) -> 29:double, FuncNegateDecimalToDecimal(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 30:decimal(12,4), FuncSignDecimalToLong(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 5:int, FuncCosDoubleToDouble(col 20:double)(children: DoubleColAddDoubleScalar(col 31:double, val 3.14159)(children: DoubleColUnaryMinus(col 20:double)(children: FuncSinDoubleToDouble(col 31:double)(children: FuncLnDoubleToDouble(col 20:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 
20:double) -> 31:double) -> 20:double) -> 31:double) -> 20:double) -> 31:double Statistics: Num rows: 2048 Data size: 233500 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -396,8 +396,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -405,9 +405,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 4 includeColumns: [0, 2] - dataColumns: cbigint:bigint, cdouble:double, cdecimal1:decimal(12,4), cdecimal2:decimal(14,8) + dataColumns: cbigint:bigint, cdouble:double, cdecimal1:decimal(12,4)/DECIMAL_64, cdecimal2:decimal(14,8)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [bigint, double, double, decimal(11,2), decimal(9,0), decimal(9,0), decimal(9,0), double, double, double, decimal(13,4), double, double, double, double, double, decimal(12,4), double, double, double, double, double, double, double, decimal(12,4), double] + scratchColumnTypeNames: [bigint, decimal(12,4), double, double, decimal(11,2), decimal(9,0), decimal(9,0), decimal(9,0), double, double, double, decimal(13,4)/DECIMAL_64, double, double, double, double, double, decimal(12,4), double, double, double, double, double, double, double, decimal(12,4), double, decimal(13,4)] Stage: Stage-0 Fetch Operator diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out index 50e4305..5e7e8ca 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out @@ -612,8 +612,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1232,8 +1232,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_round.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_round.q.out index 6737052..eb4a588 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_round.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_round.q.out @@ -59,15 +59,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:dec:decimal(10,0), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:dec:decimal(10,0)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: dec (type: decimal(10,0)), round(dec, -1) (type: decimal(11,0)) outputColumnNames: _col0, _col1 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 
2] - selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 0:decimal(10,0), decimalPlaces -1) -> 2:decimal(11,0) + projectedOutputColumnNums: [0, 3] + selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) -> 2:decimal(10,0)) -> 3:decimal(11,0) Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: decimal(10,0)) @@ -77,7 +77,7 @@ STAGE PLANS: keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2] + valueColumnNums: [3] Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(11,0)) Execution mode: vectorized, llap @@ -86,8 +86,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -95,9 +94,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: dec:decimal(10,0) + dataColumns: dec:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(11,0)] + scratchColumnTypeNames: [decimal(10,0), decimal(11,0)] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -177,22 +176,22 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:dec:decimal(10,0), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:dec:decimal(10,0)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: dec (type: decimal(10,0)), round(dec, -1) (type: decimal(11,0)) outputColumnNames: _col0, _col2 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 2] - selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 0:decimal(10,0), decimalPlaces -1) -> 2:decimal(11,0) + projectedOutputColumnNums: [0, 3] + selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) -> 2:decimal(10,0)) -> 3:decimal(11,0) Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col2 (type: decimal(11,0)) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2] + keyColumnNums: [3] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [0] @@ -204,8 +203,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + 
featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -213,9 +211,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: dec:decimal(10,0) + dataColumns: dec:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(11,0)] + scratchColumnTypeNames: [decimal(10,0), decimal(11,0)] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -585,15 +583,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:dec:decimal(10,0), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:dec:decimal(10,0)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: dec (type: decimal(10,0)), round(dec, -1) (type: decimal(11,0)) outputColumnNames: _col0, _col1 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 2] - selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 0:decimal(10,0), decimalPlaces -1) -> 2:decimal(11,0) + projectedOutputColumnNums: [0, 3] + selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) -> 2:decimal(10,0)) -> 3:decimal(11,0) Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: decimal(10,0)) @@ -603,7 +601,7 @@ STAGE PLANS: keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2] + valueColumnNums: [3] Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(11,0)) Execution mode: vectorized, llap @@ -611,8 +609,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -620,9 +618,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: dec:decimal(10,0) + dataColumns: dec:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(11,0)] + scratchColumnTypeNames: [decimal(10,0), decimal(11,0)] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -702,22 +700,22 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:dec:decimal(10,0), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:dec:decimal(10,0)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: dec (type: decimal(10,0)), round(dec, -1) (type: decimal(11,0)) outputColumnNames: _col0, _col2 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 2] - selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 0:decimal(10,0), decimalPlaces -1) -> 2:decimal(11,0) + projectedOutputColumnNums: [0, 3] + selectExpressions: 
FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) -> 2:decimal(10,0)) -> 3:decimal(11,0) Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col2 (type: decimal(11,0)) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2] + keyColumnNums: [3] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [0] @@ -728,8 +726,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -737,9 +735,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: dec:decimal(10,0) + dataColumns: dec:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(11,0)] + scratchColumnTypeNames: [decimal(10,0), decimal(11,0)] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_round_2.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_round_2.q.out index e3d4f40..bb0cbfc 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_round_2.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_round_2.q.out @@ -89,8 +89,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -257,8 +257,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -452,8 +452,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -636,8 +636,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_trailing.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_trailing.q.out index b33f090..a35f6fe 100644 
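A note on the recurring ConvertDecimal64ToDecimal(col N:decimal(p,s)/DECIMAL_64) -> M:decimal(p,s) children in the hunks above: a DECIMAL_64 column holds decimal(p,s) values with p <= 18 as a single scaled long per row, and any expression that lacks a decimal64-aware variant first materializes an ordinary decimal vector from those longs. The constant pair in Decimal64ColSubtractDecimal64Scalar(col 2:decimal(12,4)/DECIMAL_64, decimal64Val 156010000, decimalVal 15601) is exactly this encoding: 15601 at scale 4 is 15601 x 10^4 = 156010000. The sketch below is illustrative only (plain Java, not Hive's ConvertDecimal64ToDecimal or ColumnVector classes) and shows the round trip under that assumption.

    // Illustrative sketch only -- the DECIMAL_64 encoding, not Hive source.
    // decimal(p,s) with p <= 18 fits one long as value * 10^s, so 15601 at
    // scale 4 becomes 156010000 (the plan's "decimal64Val" vs "decimalVal").
    import java.math.BigDecimal;

    public class Decimal64EncodingSketch {
      static long toDecimal64(BigDecimal v, int scale) {
        // setScale without a rounding mode throws if v needs rounding;
        // longValueExact throws if the result exceeds a long's range
        return v.setScale(scale).unscaledValue().longValueExact();
      }

      static BigDecimal fromDecimal64(long scaled, int scale) {
        // conceptually what one ConvertDecimal64ToDecimal step does per value
        return BigDecimal.valueOf(scaled, scale);
      }

      public static void main(String[] args) {
        long d64 = toDecimal64(new BigDecimal("15601"), 4);
        System.out.println(d64);                   // 156010000
        System.out.println(fromDecimal64(d64, 4)); // 15601.0000
      }
    }

Expressions that do have decimal64 variants (the Decimal64Col... entries in these plans) skip the conversion and operate on the longs directly, which is the point of the feature.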
--- a/ql/src/test/results/clientpositive/llap/vector_decimal_trailing.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_trailing.q.out @@ -94,7 +94,7 @@ STAGE PLANS: Statistics: Num rows: 30 Data size: 4936 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:id:int, 1:a:decimal(10,4), 2:b:decimal(15,8), 3:ROW__ID:struct] + vectorizationSchemaColumns: [0:id:int, 1:a:decimal(10,4)/DECIMAL_64, 2:b:decimal(15,8)/DECIMAL_64, 3:ROW__ID:struct] Select Operator expressions: id (type: int), a (type: decimal(10,4)), b (type: decimal(15,8)) outputColumnNames: _col0, _col1, _col2 @@ -119,8 +119,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -128,7 +128,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 3 includeColumns: [0, 1, 2] - dataColumns: id:int, a:decimal(10,4), b:decimal(15,8) + dataColumns: id:int, a:decimal(10,4)/DECIMAL_64, b:decimal(15,8)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_udf.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_udf.q.out index deb9f67..1ef50ca 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_udf.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_udf.q.out @@ -109,8 +109,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -226,8 +226,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -343,8 +343,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -460,8 +460,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -577,8 +577,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: 
false usesVectorUDFAdaptor: false @@ -694,8 +694,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -811,8 +811,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -928,8 +928,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1045,8 +1045,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1168,8 +1168,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1269,8 +1269,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1386,8 +1386,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1503,8 +1503,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1620,8 +1620,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1744,8 +1744,8 @@ 
STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1863,8 +1863,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1972,8 +1972,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2074,8 +2074,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2191,8 +2191,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2327,8 +2327,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2507,8 +2507,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2781,8 +2781,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2898,8 +2898,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3015,8 +3015,8 @@ STAGE PLANS: Map Vectorization: enabled: true 
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -3132,8 +3132,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3268,8 +3268,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3432,8 +3432,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3675,8 +3675,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3806,8 +3806,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3937,8 +3937,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4061,7 +4061,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] Select Operator expressions: (key + key) (type: decimal(16,3)) outputColumnNames: _col0 @@ -4069,7 +4069,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [3] - selectExpressions: DecimalColAddDecimalColumn(col 0:decimal(15,3), col 0:decimal(15,3)) -> 3:decimal(16,3) + selectExpressions: Decimal64ColAddDecimal64Column(col 0:decimal(15,3)/DECIMAL_64, col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(16,3)/DECIMAL_64 Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: 
false @@ -4087,8 +4087,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4096,9 +4095,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0] - dataColumns: key:decimal(15,3), value:int + dataColumns: key:decimal(15,3)/DECIMAL_64, value:int partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(16,3)] + scratchColumnTypeNames: [decimal(16,3)/DECIMAL_64] Stage: Stage-0 Fetch Operator @@ -4179,15 +4178,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] Select Operator expressions: (key + CAST( value AS decimal(10,0))) (type: decimal(16,3)) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [4] - selectExpressions: DecimalColAddDecimalColumn(col 0:decimal(15,3), col 3:decimal(10,0))(children: CastLongToDecimal(col 1:int) -> 3:decimal(10,0)) -> 4:decimal(16,3) + projectedOutputColumnNums: [5] + selectExpressions: DecimalColAddDecimalColumn(col 3:decimal(15,3), col 4:decimal(10,0))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3), CastLongToDecimal(col 1:int) -> 4:decimal(10,0)) -> 5:decimal(16,3) Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -4205,8 +4204,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4214,9 +4212,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: key:decimal(15,3), value:int + dataColumns: key:decimal(15,3)/DECIMAL_64, value:int partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(10,0), decimal(16,3)] + scratchColumnTypeNames: [decimal(15,3), decimal(10,0), decimal(16,3)] Stage: Stage-0 Fetch Operator @@ -4297,15 +4295,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] Select Operator expressions: (UDFToDouble(key) + (UDFToDouble(value) / 2.0D)) (type: double) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [4] - selectExpressions: DoubleColAddDoubleColumn(col 3:double, col 5:double)(children: CastDecimalToDouble(col 0:decimal(15,3)) -> 3:double, DoubleColDivideDoubleScalar(col 4:double, val 2.0)(children: CastLongToDouble(col 1:int) -> 4:double) -> 5:double) -> 4:double + projectedOutputColumnNums: [5] + selectExpressions: 
DoubleColAddDoubleColumn(col 4:double, col 6:double)(children: CastDecimalToDouble(col 3:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3)) -> 4:double, DoubleColDivideDoubleScalar(col 5:double, val 2.0)(children: CastLongToDouble(col 1:int) -> 5:double) -> 6:double) -> 5:double Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -4323,8 +4321,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4332,9 +4329,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: key:decimal(15,3), value:int + dataColumns: key:decimal(15,3)/DECIMAL_64, value:int partitionColumnCount: 0 - scratchColumnTypeNames: [double, double, double] + scratchColumnTypeNames: [decimal(15,3), double, double, double] Stage: Stage-0 Fetch Operator @@ -4415,15 +4412,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] Select Operator expressions: (UDFToDouble(key) + 1.0D) (type: double) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [4] - selectExpressions: DoubleColAddDoubleScalar(col 3:double, val 1.0)(children: CastDecimalToDouble(col 0:decimal(15,3)) -> 3:double) -> 4:double + projectedOutputColumnNums: [5] + selectExpressions: DoubleColAddDoubleScalar(col 4:double, val 1.0)(children: CastDecimalToDouble(col 3:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3)) -> 4:double) -> 5:double Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -4441,8 +4438,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4450,9 +4446,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0] - dataColumns: key:decimal(15,3), value:int + dataColumns: key:decimal(15,3)/DECIMAL_64, value:int partitionColumnCount: 0 - scratchColumnTypeNames: [double, double] + scratchColumnTypeNames: [decimal(15,3), double, double] Stage: Stage-0 Fetch Operator @@ -4533,7 +4529,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] Select Operator expressions: (key - key) (type: decimal(16,3)) outputColumnNames: _col0 @@ -4541,7 +4537,7 @@ STAGE PLANS: className: VectorSelectOperator native: true 
projectedOutputColumnNums: [3] - selectExpressions: DecimalColSubtractDecimalColumn(col 0:decimal(15,3), col 0:decimal(15,3)) -> 3:decimal(16,3) + selectExpressions: Decimal64ColSubtractDecimal64Column(col 0:decimal(15,3)/DECIMAL_64, col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(16,3)/DECIMAL_64 Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -4559,8 +4555,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4568,9 +4563,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0] - dataColumns: key:decimal(15,3), value:int + dataColumns: key:decimal(15,3)/DECIMAL_64, value:int partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(16,3)] + scratchColumnTypeNames: [decimal(16,3)/DECIMAL_64] Stage: Stage-0 Fetch Operator @@ -4651,15 +4646,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] Select Operator expressions: (key - CAST( value AS decimal(10,0))) (type: decimal(16,3)) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [4] - selectExpressions: DecimalColSubtractDecimalColumn(col 0:decimal(15,3), col 3:decimal(10,0))(children: CastLongToDecimal(col 1:int) -> 3:decimal(10,0)) -> 4:decimal(16,3) + projectedOutputColumnNums: [5] + selectExpressions: DecimalColSubtractDecimalColumn(col 3:decimal(15,3), col 4:decimal(10,0))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3), CastLongToDecimal(col 1:int) -> 4:decimal(10,0)) -> 5:decimal(16,3) Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -4677,8 +4672,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4686,9 +4680,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: key:decimal(15,3), value:int + dataColumns: key:decimal(15,3)/DECIMAL_64, value:int partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(10,0), decimal(16,3)] + scratchColumnTypeNames: [decimal(15,3), decimal(10,0), decimal(16,3)] Stage: Stage-0 Fetch Operator @@ -4769,15 +4763,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] Select Operator expressions: (UDFToDouble(key) - (UDFToDouble(value) / 2.0D)) (type: double) outputColumnNames: _col0 Select Vectorization: 
className: VectorSelectOperator native: true - projectedOutputColumnNums: [4] - selectExpressions: DoubleColSubtractDoubleColumn(col 3:double, col 5:double)(children: CastDecimalToDouble(col 0:decimal(15,3)) -> 3:double, DoubleColDivideDoubleScalar(col 4:double, val 2.0)(children: CastLongToDouble(col 1:int) -> 4:double) -> 5:double) -> 4:double + projectedOutputColumnNums: [5] + selectExpressions: DoubleColSubtractDoubleColumn(col 4:double, col 6:double)(children: CastDecimalToDouble(col 3:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3)) -> 4:double, DoubleColDivideDoubleScalar(col 5:double, val 2.0)(children: CastLongToDouble(col 1:int) -> 5:double) -> 6:double) -> 5:double Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -4795,8 +4789,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4804,9 +4797,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: key:decimal(15,3), value:int + dataColumns: key:decimal(15,3)/DECIMAL_64, value:int partitionColumnCount: 0 - scratchColumnTypeNames: [double, double, double] + scratchColumnTypeNames: [decimal(15,3), double, double, double] Stage: Stage-0 Fetch Operator @@ -4887,15 +4880,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] Select Operator expressions: (UDFToDouble(key) - 1.0D) (type: double) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [4] - selectExpressions: DoubleColSubtractDoubleScalar(col 3:double, val 1.0)(children: CastDecimalToDouble(col 0:decimal(15,3)) -> 3:double) -> 4:double + projectedOutputColumnNums: [5] + selectExpressions: DoubleColSubtractDoubleScalar(col 4:double, val 1.0)(children: CastDecimalToDouble(col 3:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3)) -> 4:double) -> 5:double Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -4913,8 +4906,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4922,9 +4914,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0] - dataColumns: key:decimal(15,3), value:int + dataColumns: key:decimal(15,3)/DECIMAL_64, value:int partitionColumnCount: 0 - scratchColumnTypeNames: [double, double] + scratchColumnTypeNames: [decimal(15,3), double, double] Stage: Stage-0 Fetch Operator @@ -5005,15 +4997,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: 
@@ -5005,15 +4997,15 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct]
Select Operator
expressions: (key * key) (type: decimal(31,6))
outputColumnNames: _col0
Select Vectorization:
className: VectorSelectOperator
native: true
- projectedOutputColumnNums: [3]
- selectExpressions: DecimalColMultiplyDecimalColumn(col 0:decimal(15,3), col 0:decimal(15,3)) -> 3:decimal(31,6)
+ projectedOutputColumnNums: [5]
+ selectExpressions: DecimalColMultiplyDecimalColumn(col 3:decimal(15,3), col 4:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3), ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 4:decimal(15,3)) -> 5:decimal(31,6)
Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
@@ -5031,8 +5023,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -5040,9 +5031,9 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0]
- dataColumns: key:decimal(15,3), value:int
+ dataColumns: key:decimal(15,3)/DECIMAL_64, value:int
partitionColumnCount: 0
- scratchColumnTypeNames: [decimal(31,6)]
+ scratchColumnTypeNames: [decimal(15,3), decimal(15,3), decimal(31,6)]
Stage: Stage-0
Fetch Operator
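The key * key plan above shows the boundary of the optimization: the result type decimal(31,6) cannot use the DECIMAL_64 representation, because a scaled long can only hold up to 18 digits of precision, so the multiply stays on the HiveDecimal path and both operands are converted first (hence the two extra decimal(15,3) scratch columns). A sketch of that eligibility rule:

    // Sketch: a decimal type fits the DECIMAL_64 representation only if its
    // unscaled value always fits in a signed 64-bit long, i.e. precision <= 18.
    // decimal(15,3) qualifies; the multiply result decimal(31,6) does not.
    public final class Decimal64Eligibility {
      static final int MAX_DECIMAL64_PRECISION = 18;

      public static boolean fitsInDecimal64(int precision) {
        return precision <= MAX_DECIMAL64_PRECISION;
      }

      public static void main(String[] args) {
        System.out.println(fitsInDecimal64(15)); // true  -> operands stay as longs
        System.out.println(fitsInDecimal64(31)); // false -> result needs HiveDecimal
      }
    }
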
@@ -5123,12 +5114,12 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct]
Filter Operator
Filter Vectorization:
className: VectorFilterOperator
native: true
- predicateExpression: FilterDecimalColGreaterDecimalScalar(col 4:decimal(26,3), val 0)(children: DecimalColMultiplyDecimalColumn(col 0:decimal(15,3), col 3:decimal(10,0))(children: CastLongToDecimal(col 1:int) -> 3:decimal(10,0)) -> 4:decimal(26,3))
+ predicateExpression: FilterDecimalColGreaterDecimalScalar(col 5:decimal(26,3), val 0)(children: DecimalColMultiplyDecimalColumn(col 3:decimal(15,3), col 4:decimal(10,0))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3), CastLongToDecimal(col 1:int) -> 4:decimal(10,0)) -> 5:decimal(26,3))
predicate: ((key * CAST( value AS decimal(10,0))) > 0) (type: boolean)
Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
Select Operator
@@ -5155,8 +5146,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -5164,9 +5154,9 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0, 1]
- dataColumns: key:decimal(15,3), value:int
+ dataColumns: key:decimal(15,3)/DECIMAL_64, value:int
partitionColumnCount: 0
- scratchColumnTypeNames: [decimal(10,0), decimal(26,3)]
+ scratchColumnTypeNames: [decimal(15,3), decimal(10,0), decimal(26,3)]
Stage: Stage-0
Fetch Operator
@@ -5231,15 +5221,15 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct]
Select Operator
expressions: (key * CAST( value AS decimal(10,0))) (type: decimal(26,3))
outputColumnNames: _col0
Select Vectorization:
className: VectorSelectOperator
native: true
- projectedOutputColumnNums: [4]
- selectExpressions: DecimalColMultiplyDecimalColumn(col 0:decimal(15,3), col 3:decimal(10,0))(children: CastLongToDecimal(col 1:int) -> 3:decimal(10,0)) -> 4:decimal(26,3)
+ projectedOutputColumnNums: [5]
+ selectExpressions: DecimalColMultiplyDecimalColumn(col 3:decimal(15,3), col 4:decimal(10,0))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3), CastLongToDecimal(col 1:int) -> 4:decimal(10,0)) -> 5:decimal(26,3)
Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
@@ -5257,8 +5247,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -5266,9 +5255,9 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0, 1]
- dataColumns: key:decimal(15,3), value:int
+ dataColumns: key:decimal(15,3)/DECIMAL_64, value:int
partitionColumnCount: 0
- scratchColumnTypeNames: [decimal(10,0), decimal(26,3)]
+ scratchColumnTypeNames: [decimal(15,3), decimal(10,0), decimal(26,3)]
Stage: Stage-0
Fetch Operator
@@ -5349,15 +5338,15 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct]
Select Operator
expressions: (UDFToDouble(key) * (UDFToDouble(value) / 2.0D)) (type: double)
outputColumnNames: _col0
Select Vectorization:
className: VectorSelectOperator
native: true
- projectedOutputColumnNums: [4]
- selectExpressions: DoubleColMultiplyDoubleColumn(col 3:double, col 5:double)(children: CastDecimalToDouble(col 0:decimal(15,3)) -> 3:double, DoubleColDivideDoubleScalar(col 4:double, val 2.0)(children: CastLongToDouble(col 1:int) -> 4:double) -> 5:double) -> 4:double
+ projectedOutputColumnNums: [5]
+ selectExpressions: DoubleColMultiplyDoubleColumn(col 4:double, col 6:double)(children: CastDecimalToDouble(col 3:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3)) -> 4:double, DoubleColDivideDoubleScalar(col 5:double, val 2.0)(children: CastLongToDouble(col 1:int) -> 5:double) -> 6:double) -> 5:double
Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
@@ -5375,8 +5364,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -5384,9 +5372,9 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0, 1]
- dataColumns: key:decimal(15,3), value:int
+ dataColumns: key:decimal(15,3)/DECIMAL_64, value:int
partitionColumnCount: 0
- scratchColumnTypeNames: [double, double, double]
+ scratchColumnTypeNames: [decimal(15,3), double, double, double]
Stage: Stage-0
Fetch Operator
@@ -5467,15 +5455,15 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct]
Select Operator
expressions: (UDFToDouble(key) * 2.0D) (type: double)
outputColumnNames: _col0
Select Vectorization:
className: VectorSelectOperator
native: true
- projectedOutputColumnNums: [4]
- selectExpressions: DoubleColMultiplyDoubleScalar(col 3:double, val 2.0)(children: CastDecimalToDouble(col 0:decimal(15,3)) -> 3:double) -> 4:double
+ projectedOutputColumnNums: [5]
+ selectExpressions: DoubleColMultiplyDoubleScalar(col 4:double, val 2.0)(children: CastDecimalToDouble(col 3:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3)) -> 4:double) -> 5:double
Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
@@ -5493,8 +5481,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -5502,9 +5489,9 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0]
- dataColumns: key:decimal(15,3), value:int
+ dataColumns: key:decimal(15,3)/DECIMAL_64, value:int
partitionColumnCount: 0
- scratchColumnTypeNames: [double, double]
+ scratchColumnTypeNames: [decimal(15,3), double, double]
Stage: Stage-0
Fetch Operator
@@ -5585,15 +5572,15 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct]
Select Operator
expressions: (key / 0) (type: decimal(18,6))
outputColumnNames: _col0
Select Vectorization:
className: VectorSelectOperator
native: true
- projectedOutputColumnNums: [3]
- selectExpressions: DecimalColDivideDecimalScalar(col 0:decimal(15,3), val 0) -> 3:decimal(18,6)
+ projectedOutputColumnNums: [4]
+ selectExpressions: DecimalColDivideDecimalScalar(col 3:decimal(15,3), val 0)(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3)) -> 4:decimal(18,6)
Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
@@ -5611,8 +5598,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -5620,9 +5606,9 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0]
- dataColumns: key:decimal(15,3), value:int
+ dataColumns: key:decimal(15,3)/DECIMAL_64, value:int
partitionColumnCount: 0
- scratchColumnTypeNames: [decimal(18,6)]
+ scratchColumnTypeNames: [decimal(15,3), decimal(18,6)]
Stage: Stage-0
Fetch Operator
@@ -5703,12 +5689,12 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct]
Filter Operator
Filter Vectorization:
className: VectorFilterOperator
native: true
- predicateExpression: FilterDecimalColNotEqualDecimalScalar(col 0:decimal(15,3), val 0)
+ predicateExpression: FilterDecimal64ColNotEqualDecimal64Scalar(col 0:decimal(15,3)/DECIMAL_64, val 0)
predicate: (key <> 0) (type: boolean)
Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
Select Operator
@@ -5717,8 +5703,8 @@ STAGE PLANS:
Select Vectorization:
className: VectorSelectOperator
native: true
- projectedOutputColumnNums: [3]
- selectExpressions: DecimalColDivideDecimalColumn(col 0:decimal(15,3), col 0:decimal(15,3)) -> 3:decimal(34,19)
+ projectedOutputColumnNums: [5]
+ selectExpressions: DecimalColDivideDecimalColumn(col 3:decimal(15,3), col 4:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3), ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 4:decimal(15,3)) -> 5:decimal(34,19)
Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
@@ -5736,8 +5722,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -5745,9 +5730,9 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0]
- dataColumns: key:decimal(15,3), value:int
+ dataColumns: key:decimal(15,3)/DECIMAL_64, value:int
partitionColumnCount: 0
- scratchColumnTypeNames: [decimal(34,19)]
+ scratchColumnTypeNames: [decimal(15,3), decimal(15,3), decimal(34,19)]
Stage: Stage-0
Fetch Operator
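The key <> 0 predicate above, by contrast, stays entirely in 64-bit form: FilterDecimal64ColNotEqualDecimal64Scalar compares scaled longs against a pre-scaled scalar with no conversion at all. A minimal sketch of the selection loop, with a simplified hypothetical signature rather than Hive's VectorizedRowBatch plumbing:

    // Sketch: keep the rows of a scaled-long column that differ from a scalar.
    // The scalar is scaled once up front (0 at scale 3 is still 0), so the
    // inner loop is a plain long comparison.
    public final class Decimal64FilterSketch {
      public static int filterNotEqual(long[] col, long scaledScalar, int[] selected, int n) {
        int newSize = 0;
        for (int i = 0; i < n; i++) {
          if (col[i] != scaledScalar) {
            selected[newSize++] = i;
          }
        }
        return newSize; // number of surviving rows
      }
    }
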
@@ -5823,7 +5808,7 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct]
Filter Operator
Filter Vectorization:
className: VectorFilterOperator
@@ -5837,8 +5822,8 @@ STAGE PLANS:
Select Vectorization:
className: VectorSelectOperator
native: true
- projectedOutputColumnNums: [4]
- selectExpressions: DecimalColDivideDecimalColumn(col 0:decimal(15,3), col 3:decimal(10,0))(children: CastLongToDecimal(col 1:int) -> 3:decimal(10,0)) -> 4:decimal(26,14)
+ projectedOutputColumnNums: [5]
+ selectExpressions: DecimalColDivideDecimalColumn(col 3:decimal(15,3), col 4:decimal(10,0))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3), CastLongToDecimal(col 1:int) -> 4:decimal(10,0)) -> 5:decimal(26,14)
Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
@@ -5856,8 +5841,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -5865,9 +5849,9 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0, 1]
- dataColumns: key:decimal(15,3), value:int
+ dataColumns: key:decimal(15,3)/DECIMAL_64, value:int
partitionColumnCount: 0
- scratchColumnTypeNames: [decimal(10,0), decimal(26,14)]
+ scratchColumnTypeNames: [decimal(15,3), decimal(10,0), decimal(26,14)]
Stage: Stage-0
Fetch Operator
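It is worth spelling out why every decimal division here leaves the DECIMAL_64 path: with Hive's decimal division typing (scale = max(6, s1 + p2 + 1), precision = p1 - s1 + s2 + scale, before the 38-digit cap), decimal(15,3) / decimal(10,0) gives scale max(6, 3 + 10 + 1) = 14 and precision 15 - 3 + 0 + 14 = 26, hence the decimal(26,14) above; the same rule reproduces the decimal(18,6) and decimal(34,19) results in the earlier hunks. A tiny check of that arithmetic, assuming those rules:

    // Sketch: Hive-style result type for decimal division (uncapped case).
    // divideType(15, 3, 10, 0) -> {26, 14}, too wide for DECIMAL_64.
    public final class DecimalDivideTypeSketch {
      public static int[] divideType(int p1, int s1, int p2, int s2) {
        int scale = Math.max(6, s1 + p2 + 1);
        int precision = p1 - s1 + s2 + scale;
        return new int[] {precision, scale};
      }
    }
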
@@ -5933,7 +5917,7 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct]
Filter Operator
Filter Vectorization:
className: VectorFilterOperator
@@ -5947,8 +5931,8 @@ STAGE PLANS:
Select Vectorization:
className: VectorSelectOperator
native: true
- projectedOutputColumnNums: [4]
- selectExpressions: DoubleColDivideDoubleColumn(col 3:double, col 5:double)(children: CastDecimalToDouble(col 0:decimal(15,3)) -> 3:double, DoubleColDivideDoubleScalar(col 4:double, val 2.0)(children: CastLongToDouble(col 1:int) -> 4:double) -> 5:double) -> 4:double
+ projectedOutputColumnNums: [5]
+ selectExpressions: DoubleColDivideDoubleColumn(col 4:double, col 6:double)(children: CastDecimalToDouble(col 3:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3)) -> 4:double, DoubleColDivideDoubleScalar(col 5:double, val 2.0)(children: CastLongToDouble(col 1:int) -> 5:double) -> 6:double) -> 5:double
Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
@@ -5966,8 +5950,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -5975,9 +5958,9 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0, 1]
- dataColumns: key:decimal(15,3), value:int
+ dataColumns: key:decimal(15,3)/DECIMAL_64, value:int
partitionColumnCount: 0
- scratchColumnTypeNames: [double, double, double]
+ scratchColumnTypeNames: [decimal(15,3), double, double, double]
Stage: Stage-0
Fetch Operator
@@ -6043,15 +6026,15 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct]
Select Operator
expressions: (1.0D + (UDFToDouble(key) / 2.0D)) (type: double)
outputColumnNames: _col0
Select Vectorization:
className: VectorSelectOperator
native: true
- projectedOutputColumnNums: [3]
- selectExpressions: DoubleScalarAddDoubleColumn(val 1.0, col 4:double)(children: DoubleColDivideDoubleScalar(col 3:double, val 2.0)(children: CastDecimalToDouble(col 0:decimal(15,3)) -> 3:double) -> 4:double) -> 3:double
+ projectedOutputColumnNums: [4]
+ selectExpressions: DoubleScalarAddDoubleColumn(val 1.0, col 5:double)(children: DoubleColDivideDoubleScalar(col 4:double, val 2.0)(children: CastDecimalToDouble(col 3:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3)) -> 4:double) -> 5:double) -> 4:double
Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
@@ -6069,8 +6052,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -6078,9 +6060,9 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0]
- dataColumns: key:decimal(15,3), value:int
+ dataColumns: key:decimal(15,3)/DECIMAL_64, value:int
partitionColumnCount: 0
- scratchColumnTypeNames: [double, double]
+ scratchColumnTypeNames: [decimal(15,3), double, double]
Stage: Stage-0
Fetch Operator
@@ -6161,15 +6143,15 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct]
Select Operator
expressions: abs(key) (type: decimal(15,3))
outputColumnNames: _col0
Select Vectorization:
className: VectorSelectOperator
native: true
- projectedOutputColumnNums: [3]
- selectExpressions: FuncAbsDecimalToDecimal(col 0:decimal(15,3)) -> 3:decimal(15,3)
+ projectedOutputColumnNums: [4]
+ selectExpressions: FuncAbsDecimalToDecimal(col 3:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3)) -> 4:decimal(15,3)
Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
@@ -6187,8 +6169,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -6196,9 +6177,9 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0]
- dataColumns: key:decimal(15,3), value:int
+ dataColumns: key:decimal(15,3)/DECIMAL_64, value:int
partitionColumnCount: 0
- scratchColumnTypeNames: [decimal(15,3)]
+ scratchColumnTypeNames: [decimal(15,3), decimal(15,3)]
Stage: Stage-0
Fetch Operator
@@ -6283,7 +6264,7 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct]
Select Operator
expressions: key (type: decimal(15,3)), value (type: int)
outputColumnNames: key, value
@@ -6295,7 +6276,7 @@ STAGE PLANS:
Group By Operator
aggregations: sum(key), count(key)
Group By Vectorization:
- aggregators: VectorUDAFSumDecimal(col 0:decimal(15,3)) -> decimal(25,3), VectorUDAFCount(col 0:decimal(15,3)) -> bigint
+ aggregators: VectorUDAFSumDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> decimal(25,3), VectorUDAFCount(col 0:decimal(15,3)/DECIMAL_64) -> bigint
className: VectorGroupByOperator
groupByMode: HASH
keyExpressions: col 1:int
@@ -6324,8 +6305,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -6333,7 +6313,7 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0, 1]
- dataColumns: key:decimal(15,3), value:int
+ dataColumns: key:decimal(15,3)/DECIMAL_64, value:int
partitionColumnCount: 0
scratchColumnTypeNames: []
Reducer 2
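The sum(key) plan above switches to VectorUDAFSumDecimal64ToDecimal: partial sums are accumulated in a long and only the final aggregate is materialized as decimal(25,3). A simplified sketch of that shape, with BigDecimal standing in for HiveDecimal and without the overflow spill-over handling the real aggregator needs:

    import java.math.BigDecimal;

    // Sketch: summing a DECIMAL_64 column. The accumulator stays a long and
    // is widened only at the end; this sketch simply throws on long overflow
    // instead of switching representations as the real UDAF must.
    public final class SumDecimal64Sketch {
      public static BigDecimal sum(long[] scaledValues, int n, int scale) {
        long acc = 0;
        for (int i = 0; i < n; i++) {
          acc = Math.addExact(acc, scaledValues[i]);
        }
        return BigDecimal.valueOf(acc, scale); // e.g. decimal(25,3) for decimal(15,3) input
      }
    }
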
@@ -6479,15 +6459,15 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct]
Select Operator
expressions: (- key) (type: decimal(15,3))
outputColumnNames: _col0
Select Vectorization:
className: VectorSelectOperator
native: true
- projectedOutputColumnNums: [3]
- selectExpressions: FuncNegateDecimalToDecimal(col 0:decimal(15,3)) -> 3:decimal(15,3)
+ projectedOutputColumnNums: [4]
+ selectExpressions: FuncNegateDecimalToDecimal(col 3:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3)) -> 4:decimal(15,3)
Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
@@ -6505,8 +6485,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -6514,9 +6493,9 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0]
- dataColumns: key:decimal(15,3), value:int
+ dataColumns: key:decimal(15,3)/DECIMAL_64, value:int
partitionColumnCount: 0
- scratchColumnTypeNames: [decimal(15,3)]
+ scratchColumnTypeNames: [decimal(15,3), decimal(15,3)]
Stage: Stage-0
Fetch Operator
@@ -6754,15 +6733,15 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct]
Select Operator
expressions: floor(key) (type: decimal(13,0))
outputColumnNames: _col0
Select Vectorization:
className: VectorSelectOperator
native: true
- projectedOutputColumnNums: [3]
- selectExpressions: FuncFloorDecimalToDecimal(col 0:decimal(15,3)) -> 3:decimal(13,0)
+ projectedOutputColumnNums: [4]
+ selectExpressions: FuncFloorDecimalToDecimal(col 3:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3)) -> 4:decimal(13,0)
Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
@@ -6780,8 +6759,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -6789,9 +6767,9 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0]
- dataColumns: key:decimal(15,3), value:int
+ dataColumns: key:decimal(15,3)/DECIMAL_64, value:int
partitionColumnCount: 0
- scratchColumnTypeNames: [decimal(13,0)]
+ scratchColumnTypeNames: [decimal(15,3), decimal(13,0)]
Stage: Stage-0
Fetch Operator
@@ -6872,15 +6850,15 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct]
Select Operator
expressions: round(key, 2) (type: decimal(15,2))
outputColumnNames: _col0
Select Vectorization:
className: VectorSelectOperator
native: true
- projectedOutputColumnNums: [3]
- selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 0:decimal(15,3), decimalPlaces 2) -> 3:decimal(15,2)
+ projectedOutputColumnNums: [4]
+ selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 3:decimal(15,3), decimalPlaces 2)(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3)) -> 4:decimal(15,2)
Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
@@ -6898,8 +6876,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -6907,9 +6884,9 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0]
- dataColumns: key:decimal(15,3), value:int
+ dataColumns: key:decimal(15,3)/DECIMAL_64, value:int
partitionColumnCount: 0
- scratchColumnTypeNames: [decimal(15,2)]
+ scratchColumnTypeNames: [decimal(15,3), decimal(15,2)]
Stage: Stage-0
Fetch Operator
@@ -6990,7 +6967,7 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct]
Select Operator
expressions: power(key, 2) (type: double)
outputColumnNames: _col0
@@ -7016,8 +6993,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: true
@@ -7025,7 +7001,7 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0]
- dataColumns: key:decimal(15,3), value:int
+ dataColumns: key:decimal(15,3)/DECIMAL_64, value:int
partitionColumnCount: 0
scratchColumnTypeNames: [double]
@@ -7108,15 +7084,15 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct]
Select Operator
expressions: ((key + 1) % (key / 2)) (type: decimal(18,6))
outputColumnNames: _col0
Select Vectorization:
className: VectorSelectOperator
native: true
- projectedOutputColumnNums: [5]
- selectExpressions: DecimalColModuloDecimalColumn(col 3:decimal(16,3), col 4:decimal(18,6))(children: DecimalColAddDecimalScalar(col 0:decimal(15,3), val 1) -> 3:decimal(16,3), DecimalColDivideDecimalScalar(col 0:decimal(15,3), val 2) -> 4:decimal(18,6)) -> 5:decimal(18,6)
+ projectedOutputColumnNums: [6]
+ selectExpressions: DecimalColModuloDecimalColumn(col 7:decimal(16,3), col 5:decimal(18,6))(children: ConvertDecimal64ToDecimal(col 3:decimal(16,3)/DECIMAL_64)(children: Decimal64ColAddDecimal64Scalar(col 0:decimal(15,3)/DECIMAL_64, decimal64Val 1000, decimalVal 1) -> 3:decimal(16,3)/DECIMAL_64) -> 7:decimal(16,3), DecimalColDivideDecimalScalar(col 4:decimal(15,3), val 2)(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 4:decimal(15,3)) -> 5:decimal(18,6)) -> 6:decimal(18,6)
Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
@@ -7134,8 +7110,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -7143,9 +7118,9 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0]
- dataColumns: key:decimal(15,3), value:int
+ dataColumns: key:decimal(15,3)/DECIMAL_64, value:int
partitionColumnCount: 0
- scratchColumnTypeNames: [decimal(16,3), decimal(18,6), decimal(18,6)]
+ scratchColumnTypeNames: [decimal(16,3)/DECIMAL_64, decimal(15,3), decimal(18,6), decimal(18,6), decimal(16,3)]
Stage: Stage-0
Fetch Operator
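The modulo plan above also shows how literals are encoded for 64-bit expressions: Decimal64ColAddDecimal64Scalar carries both decimal64Val 1000 and decimalVal 1, because the constant 1 pre-scaled to the column's scale of 3 is 1 * 10^3 = 1000. A sketch of that scaling step:

    // Sketch: encoding a decimal literal for a DECIMAL_64 expression. The
    // scalar is scaled to the column scale once, so the row loop stays pure
    // long arithmetic: for decimal(15,3), 1 becomes 1000 (the decimal64Val).
    public final class Decimal64ScalarSketch {
      public static long toScaledLong(long unscaledLiteral, int columnScale) {
        long scaled = unscaledLiteral;
        for (int i = 0; i < columnScale; i++) {
          scaled = Math.multiplyExact(scaled, 10L);
        }
        return scaled;
      }

      public static void main(String[] args) {
        System.out.println(toScaledLong(1, 3)); // 1000, as in the plan above
      }
    }
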
@@ -7229,20 +7204,20 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct]
Select Operator
expressions: value (type: int), key (type: decimal(15,3)), UDFToDouble(key) (type: double), (UDFToDouble(key) * UDFToDouble(key)) (type: double)
outputColumnNames: _col0, _col1, _col2, _col3
Select Vectorization:
className: VectorSelectOperator
native: true
- projectedOutputColumnNums: [1, 0, 3, 6]
- selectExpressions: CastDecimalToDouble(col 0:decimal(15,3)) -> 3:double, DoubleColMultiplyDoubleColumn(col 4:double, col 5:double)(children: CastDecimalToDouble(col 0:decimal(15,3)) -> 4:double, CastDecimalToDouble(col 0:decimal(15,3)) -> 5:double) -> 6:double
+ projectedOutputColumnNums: [1, 0, 4, 7]
+ selectExpressions: CastDecimalToDouble(col 3:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3)) -> 4:double, DoubleColMultiplyDoubleColumn(col 5:double, col 6:double)(children: CastDecimalToDouble(col 3:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3)) -> 5:double, CastDecimalToDouble(col 3:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3)) -> 6:double) -> 7:double
Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: sum(_col3), sum(_col2), count(_col1)
Group By Vectorization:
- aggregators: VectorUDAFSumDouble(col 6:double) -> double, VectorUDAFSumDouble(col 3:double) -> double, VectorUDAFCount(col 0:decimal(15,3)) -> bigint
+ aggregators: VectorUDAFSumDouble(col 7:double) -> double, VectorUDAFSumDouble(col 4:double) -> double, VectorUDAFCount(col 0:decimal(15,3)/DECIMAL_64) -> bigint
className: VectorGroupByOperator
groupByMode: HASH
keyExpressions: col 1:int
@@ -7271,8 +7246,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -7280,9 +7254,9 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0, 1]
- dataColumns: key:decimal(15,3), value:int
+ dataColumns: key:decimal(15,3)/DECIMAL_64, value:int
partitionColumnCount: 0
- scratchColumnTypeNames: [double, double, double, double]
+ scratchColumnTypeNames: [decimal(15,3), double, double, double, double]
Reducer 2
Execution mode: vectorized, llap
Reduce Vectorization:
@@ -7394,20 +7368,20 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct]
Select Operator
expressions: value (type: int), key (type: decimal(15,3)), UDFToDouble(key) (type: double), (UDFToDouble(key) * UDFToDouble(key)) (type: double)
outputColumnNames: _col0, _col1, _col2, _col3
Select Vectorization:
className: VectorSelectOperator
native: true
- projectedOutputColumnNums: [1, 0, 3, 6]
- selectExpressions: CastDecimalToDouble(col 0:decimal(15,3)) -> 3:double, DoubleColMultiplyDoubleColumn(col 4:double, col 5:double)(children: CastDecimalToDouble(col 0:decimal(15,3)) -> 4:double, CastDecimalToDouble(col 0:decimal(15,3)) -> 5:double) -> 6:double
+ projectedOutputColumnNums: [1, 0, 4, 7]
+ selectExpressions: CastDecimalToDouble(col 3:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3)) -> 4:double, DoubleColMultiplyDoubleColumn(col 5:double, col 6:double)(children: CastDecimalToDouble(col 3:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3)) -> 5:double, CastDecimalToDouble(col 3:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3)) -> 6:double) -> 7:double
Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: sum(_col3), sum(_col2), count(_col1)
Group By Vectorization:
- aggregators: VectorUDAFSumDouble(col 6:double) -> double, VectorUDAFSumDouble(col 3:double) -> double, VectorUDAFCount(col 0:decimal(15,3)) -> bigint
+ aggregators: VectorUDAFSumDouble(col 7:double) -> double, VectorUDAFSumDouble(col 4:double) -> double, VectorUDAFCount(col 0:decimal(15,3)/DECIMAL_64) -> bigint
className: VectorGroupByOperator
groupByMode: HASH
keyExpressions: col 1:int
@@ -7436,8 +7410,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -7445,9 +7418,9 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0, 1]
- dataColumns: key:decimal(15,3), value:int
+ dataColumns: key:decimal(15,3)/DECIMAL_64, value:int
partitionColumnCount: 0
- scratchColumnTypeNames: [double, double, double, double]
+ scratchColumnTypeNames: [decimal(15,3), double, double, double, double]
Reducer 2
Execution mode: vectorized, llap
Reduce Vectorization:
@@ -7643,7 +7616,7 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct]
Select Operator
expressions: key (type: decimal(15,3))
outputColumnNames: key
@@ -7655,7 +7628,7 @@ STAGE PLANS:
Group By Operator
aggregations: min(key)
Group By Vectorization:
- aggregators: VectorUDAFMinDecimal(col 0:decimal(15,3)) -> decimal(15,3)
+ aggregators: VectorUDAFMinDecimal64(col 0:decimal(15,3)/DECIMAL_64) -> decimal(15,3)/DECIMAL_64
className: VectorGroupByOperator
groupByMode: HASH
native: false
@@ -7680,8 +7653,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -7689,7 +7661,7 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0]
- dataColumns: key:decimal(15,3), value:int
+ dataColumns: key:decimal(15,3)/DECIMAL_64, value:int
partitionColumnCount: 0
scratchColumnTypeNames: []
Reducer 2
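Unlike sum, the min(key) aggregation above (and the max(key) plan that follows) never needs the wide representation: VectorUDAFMinDecimal64 and VectorUDAFMaxDecimal64 compare scaled longs directly, and the aggregate itself stays decimal(15,3)/DECIMAL_64, since same-scale values order exactly like the decimals they encode. A sketch:

    // Sketch: min over a DECIMAL_64 column. All values share one scale, so
    // comparing the scaled longs preserves decimal ordering and the
    // accumulator can stay a long from start to finish.
    public final class MinDecimal64Sketch {
      public static long min(long[] scaledValues, int n) {
        long best = Long.MAX_VALUE;
        for (int i = 0; i < n; i++) {
          best = Math.min(best, scaledValues[i]);
        }
        return best; // still in scaled-long (DECIMAL_64) form
      }
    }
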
@@ -7775,7 +7747,7 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct]
Select Operator
expressions: key (type: decimal(15,3))
outputColumnNames: key
@@ -7787,7 +7759,7 @@ STAGE PLANS:
Group By Operator
aggregations: max(key)
Group By Vectorization:
- aggregators: VectorUDAFMaxDecimal(col 0:decimal(15,3)) -> decimal(15,3)
+ aggregators: VectorUDAFMaxDecimal64(col 0:decimal(15,3)/DECIMAL_64) -> decimal(15,3)/DECIMAL_64
className: VectorGroupByOperator
groupByMode: HASH
native: false
@@ -7812,8 +7784,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -7821,7 +7792,7 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0]
- dataColumns: key:decimal(15,3), value:int
+ dataColumns: key:decimal(15,3)/DECIMAL_64, value:int
partitionColumnCount: 0
scratchColumnTypeNames: []
Reducer 2
@@ -7907,7 +7878,7 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:key:decimal(15,3), 1:value:int, 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:key:decimal(15,3)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct]
Select Operator
expressions: key (type: decimal(15,3))
outputColumnNames: key
@@ -7919,7 +7890,7 @@ STAGE PLANS:
Group By Operator
aggregations: count(key)
Group By Vectorization:
- aggregators: VectorUDAFCount(col 0:decimal(15,3)) -> bigint
+ aggregators: VectorUDAFCount(col 0:decimal(15,3)/DECIMAL_64) -> bigint
className: VectorGroupByOperator
groupByMode: HASH
native: false
@@ -7944,8 +7915,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -7953,7 +7923,7 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0]
- dataColumns: key:decimal(15,3), value:int
+ dataColumns: key:decimal(15,3)/DECIMAL_64, value:int
partitionColumnCount: 0
scratchColumnTypeNames: []
Reducer 2
diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_udf2.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_udf2.q.out
index 9e1c8d7..4c9b737 100644
--- a/ql/src/test/results/clientpositive/llap/vector_decimal_udf2.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_decimal_udf2.q.out
@@ -86,12 +86,12 @@ STAGE PLANS:
Statistics: Num rows: 39 Data size: 4032 Basic stats: COMPLETE Column stats: COMPLETE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:key:decimal(14,5), 1:value:int, 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:key:decimal(14,5)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct]
Filter Operator
Filter Vectorization:
className: VectorFilterOperator
native: true
- predicateExpression: FilterDecimalColEqualDecimalScalar(col 0:decimal(14,5), val 10)
+ predicateExpression: FilterDecimal64ColEqualDecimal64Scalar(col 0:decimal(14,5)/DECIMAL_64, val 1000000)
predicate: (key = 10) (type: boolean)
Statistics: Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
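The vector_decimal_udf2 filters make the literal scaling easy to verify: for a decimal(14,5) column, the predicate key = 10 is rewritten to compare against val 1000000, i.e. 10 * 10^5. A one-line check:

    // Sketch: scaled form of the literal 10 for a decimal(14,5) column,
    // matching "FilterDecimal64ColEqualDecimal64Scalar(..., val 1000000)".
    public final class ScaledLiteralCheck {
      public static void main(String[] args) {
        long scaled = 10L * 100_000L; // 10 * 10^5
        System.out.println(scaled);   // 1000000
      }
    }
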
@@ -118,8 +118,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -127,7 +127,7 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0]
- dataColumns: key:decimal(14,5), value:int
+ dataColumns: key:decimal(14,5)/DECIMAL_64, value:int
partitionColumnCount: 0
scratchColumnTypeNames: [double, double, double, double, double, double, double]
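For these ORC-backed tables the flip from empty lists to [DECIMAL_64] in both inputFormatFeatureSupport and featureSupportInUse reflects the input format now advertising the feature under LLAP; the planner records in featureSupportInUse the intersection of what the format advertises and what the relevant setting allows. A sketch of inspecting that setting, where the config name is recalled from memory and should be verified against HiveConf before relying on it:

    import org.apache.hadoop.conf.Configuration;

    // Sketch: does the configuration permit DECIMAL_64 for vectorized input
    // formats? Config key assumed ("hive.vectorized.input.format.supports.enabled",
    // believed to default to "decimal_64"); treat as an assumption.
    public final class FeatureSupportCheck {
      public static boolean decimal64Allowed(Configuration conf) {
        String enabled = conf.get("hive.vectorized.input.format.supports.enabled", "decimal_64");
        return enabled.toLowerCase().contains("decimal_64");
      }
    }
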
@@ -195,12 +195,12 @@ STAGE PLANS:
Statistics: Num rows: 39 Data size: 4188 Basic stats: COMPLETE Column stats: COMPLETE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:key:decimal(14,5), 1:value:int, 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:key:decimal(14,5)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct]
Filter Operator
Filter Vectorization:
className: VectorFilterOperator
native: true
- predicateExpression: FilterDecimalColEqualDecimalScalar(col 0:decimal(14,5), val 10)
+ predicateExpression: FilterDecimal64ColEqualDecimal64Scalar(col 0:decimal(14,5)/DECIMAL_64, val 1000000)
predicate: (key = 10) (type: boolean)
Statistics: Num rows: 2 Data size: 232 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
@@ -227,8 +227,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: true
@@ -236,7 +236,7 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0, 1]
- dataColumns: key:decimal(14,5), value:int
+ dataColumns: key:decimal(14,5)/DECIMAL_64, value:int
partitionColumnCount: 0
scratchColumnTypeNames: [double, double, double, double, double, double, double, double]
@@ -310,12 +310,12 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:key:decimal(14,5), 1:value:int, 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:key:decimal(14,5)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct]
Filter Operator
Filter Vectorization:
className: VectorFilterOperator
native: true
- predicateExpression: FilterDecimalColEqualDecimalScalar(col 0:decimal(14,5), val 10)
+ predicateExpression: FilterDecimal64ColEqualDecimal64Scalar(col 0:decimal(14,5)/DECIMAL_64, val 1000000)
predicate: (key = 10) (type: boolean)
Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
Select Operator
@@ -343,8 +343,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -352,7 +351,7 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0]
- dataColumns: key:decimal(14,5), value:int
+ dataColumns: key:decimal(14,5)/DECIMAL_64, value:int
partitionColumnCount: 0
scratchColumnTypeNames: [double, double, double, double, double, double, double]
@@ -420,12 +419,12 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:key:decimal(14,5), 1:value:int, 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:key:decimal(14,5)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct]
Filter Operator
Filter Vectorization:
className: VectorFilterOperator
native: true
- predicateExpression: FilterDecimalColEqualDecimalScalar(col 0:decimal(14,5), val 10)
+ predicateExpression: FilterDecimal64ColEqualDecimal64Scalar(col 0:decimal(14,5)/DECIMAL_64, val 1000000)
predicate: (key = 10) (type: boolean)
Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
Select Operator
@@ -453,8 +452,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: true
@@ -462,7 +460,7 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0, 1]
- dataColumns: key:decimal(14,5), value:int
+ dataColumns: key:decimal(14,5)/DECIMAL_64, value:int
partitionColumnCount: 0
scratchColumnTypeNames: [double, double, double, double, double, double, double, double]
diff --git a/ql/src/test/results/clientpositive/llap/vector_distinct_2.q.out b/ql/src/test/results/clientpositive/llap/vector_distinct_2.q.out
index bf272d0..747b74a 100644
--- a/ql/src/test/results/clientpositive/llap/vector_distinct_2.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_distinct_2.q.out
@@ -164,8 +164,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_elt.q.out b/ql/src/test/results/clientpositive/llap/vector_elt.q.out
index 7303886..5745af2 100644
--- a/ql/src/test/results/clientpositive/llap/vector_elt.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_elt.q.out
@@ -63,8 +63,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -174,8 +174,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby4.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby4.q.out
index 342da4e..6912d7b 100644
--- a/ql/src/test/results/clientpositive/llap/vector_groupby4.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_groupby4.q.out
@@ -77,8 +77,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby6.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby6.q.out
index d0b3395..d3c6548 100644
--- a/ql/src/test/results/clientpositive/llap/vector_groupby6.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_groupby6.q.out
@@ -77,8 +77,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby_3.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_3.q.out
index ac6c589..a118b2e 100644
--- a/ql/src/test/results/clientpositive/llap/vector_groupby_3.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_groupby_3.q.out
@@ -167,8 +167,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby_cube1.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_cube1.q.out
index 2ea9018..5c0d6bb 100644
--- a/ql/src/test/results/clientpositive/llap/vector_groupby_cube1.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_groupby_cube1.q.out
@@ -84,8 +84,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -222,8 +221,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -386,8 +384,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -648,8 +645,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -1038,8 +1034,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id1.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id1.q.out
index 26e31d0..1ffa0fd 100644
--- a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id1.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id1.q.out
@@ -94,8 +94,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -253,8 +253,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -412,8 +412,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -565,8 +565,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -718,8 +718,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -878,8 +878,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id2.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id2.q.out
index dbc9c9a..dce2930 100644
--- a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id2.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id2.q.out
@@ -99,8 +99,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -301,8 +301,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -512,8 +512,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -819,8 +819,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -1122,8 +1122,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -1450,8 +1450,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -1771,8 +1771,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -1936,8 +1936,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -2165,8 +2165,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id3.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id3.q.out @@ -113,8 +113,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -281,8 +281,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets1.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets1.q.out index 315e7c7..1229c6d 100644 --- a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets1.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets1.q.out @@ -113,8 +113,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -275,8 +275,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -437,8 +437,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -599,8 +599,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -754,8 +754,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -910,8 +910,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1052,8 +1052,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets2.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets2.q.out index 38cafb0..f8220e1 100644 --- a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets2.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets2.q.out @@ -99,8 +99,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -280,8 +280,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -684,8 +684,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3_dec.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3_dec.q.out index 7c77c4b..d00306b 100644 --- a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3_dec.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3_dec.q.out @@ -66,7 +66,7 @@ STAGE PLANS: Statistics: Num rows: 12 Data size: 5760 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:a:string, 1:b:string, 2:c_dec:decimal(10,2), 3:ROW__ID:struct] + vectorizationSchemaColumns: [0:a:string, 1:b:string, 2:c_dec:decimal(10,2)/DECIMAL_64, 3:ROW__ID:struct] Select Operator expressions: a (type: string), b (type: string), c_dec (type: decimal(10,2)) outputColumnNames: a, b, c_dec @@ -78,7 +78,7 @@ STAGE PLANS: Group By Operator aggregations: sum(c_dec), count(c_dec), count() Group By Vectorization: - aggregators: VectorUDAFSumDecimal(col 2:decimal(10,2)) -> decimal(20,2), VectorUDAFCount(col 2:decimal(10,2)) -> bigint, VectorUDAFCountStar(*) -> bigint + aggregators: VectorUDAFSumDecimal64ToDecimal(col 2:decimal(10,2)/DECIMAL_64) -> decimal(20,2), VectorUDAFCount(col 2:decimal(10,2)/DECIMAL_64) -> bigint, VectorUDAFCountStar(*) -> bigint className: VectorGroupByOperator groupByMode: HASH keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 4:bigint @@ -106,8 +106,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -115,7 +115,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 3 includeColumns: [0, 1, 2] - dataColumns: a:string, b:string, c_dec:decimal(10,2) + dataColumns: a:string, b:string, c_dec:decimal(10,2)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [bigint] Reducer 2 @@ -205,7 +205,7 @@ STAGE PLANS: Statistics: Num rows: 12 Data size: 5760 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:a:string, 1:b:string, 2:c_dec:decimal(10,2), 3:ROW__ID:struct] + vectorizationSchemaColumns: [0:a:string, 1:b:string, 2:c_dec:decimal(10,2)/DECIMAL_64, 3:ROW__ID:struct] Select Operator expressions: a (type: string), b (type: string), c_dec (type: decimal(10,2)) outputColumnNames: a, b, c_dec @@ -217,7 +217,7 @@ STAGE PLANS: Group By Operator aggregations: sum(c_dec), count(c_dec), count() Group By Vectorization: - aggregators: VectorUDAFSumDecimal(col 2:decimal(10,2)) -> decimal(20,2), VectorUDAFCount(col 2:decimal(10,2)) -> bigint, VectorUDAFCountStar(*) -> bigint + aggregators: VectorUDAFSumDecimal64ToDecimal(col 2:decimal(10,2)/DECIMAL_64) -> decimal(20,2), VectorUDAFCount(col 2:decimal(10,2)/DECIMAL_64) -> bigint, VectorUDAFCountStar(*) -> bigint className: VectorGroupByOperator groupByMode: HASH keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 4:bigint @@ -245,8 +245,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -254,7 +254,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 3 includeColumns: [0, 1, 2] - dataColumns: a:string, b:string, c_dec:decimal(10,2) + dataColumns: a:string, b:string, c_dec:decimal(10,2)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [bigint] Reducer 2 @@ -370,7 +370,7 @@ STAGE PLANS: Statistics: Num rows: 12 Data size: 5760 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:a:string, 1:b:string, 2:c_dec:decimal(10,2), 3:ROW__ID:struct] + vectorizationSchemaColumns: [0:a:string, 1:b:string, 2:c_dec:decimal(10,2)/DECIMAL_64, 3:ROW__ID:struct] Select Operator expressions: a (type: string), b (type: string), c_dec (type: decimal(10,2)) outputColumnNames: a, b, c_dec @@ -382,7 +382,7 @@ STAGE PLANS: Group By Operator aggregations: sum(c_dec), count(c_dec), count() Group By Vectorization: - aggregators: VectorUDAFSumDecimal(col 2:decimal(10,2)) -> decimal(20,2), VectorUDAFCount(col 2:decimal(10,2)) -> bigint, VectorUDAFCountStar(*) -> bigint + aggregators: VectorUDAFSumDecimal64ToDecimal(col 2:decimal(10,2)/DECIMAL_64) -> decimal(20,2), VectorUDAFCount(col 2:decimal(10,2)/DECIMAL_64) -> bigint, VectorUDAFCountStar(*) -> bigint className: VectorGroupByOperator groupByMode: HASH keyExpressions: col 0:string, col 1:string @@ -410,8 +410,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - 
featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -419,7 +419,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 3 includeColumns: [0, 1, 2] - dataColumns: a:string, b:string, c_dec:decimal(10,2) + dataColumns: a:string, b:string, c_dec:decimal(10,2)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets4.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets4.q.out index 285c154..a262f26 100644 --- a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets4.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets4.q.out @@ -119,8 +119,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -363,8 +363,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -626,8 +626,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets5.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets5.q.out index 586d713..bbfba28 100644 --- a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets5.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets5.q.out @@ -98,8 +98,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -290,8 +290,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -509,8 +509,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git 
a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets6.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets6.q.out index 069594e..e26b6c5 100644 --- a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets6.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets6.q.out @@ -98,8 +98,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -241,8 +241,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_grouping.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_grouping.q.out index 0999c30..9501927 100644 --- a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_grouping.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_grouping.q.out @@ -98,8 +98,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -259,8 +259,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -427,8 +427,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -596,8 +596,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -802,8 +802,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -963,8 +963,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1138,8 +1138,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -1300,8 +1300,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -1500,8 +1500,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1656,8 +1656,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1814,8 +1814,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1963,8 +1963,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2129,8 +2129,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2295,8 +2295,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2456,8 +2456,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - 
inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_limit.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_limit.q.out index c4b7fc3..bddde5f 100644 --- a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_limit.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_limit.q.out @@ -100,8 +100,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -301,8 +301,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -502,8 +502,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -700,8 +700,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -895,8 +895,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1080,8 +1080,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_window.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_window.q.out index 58e184d..5d81631 100644 --- a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_window.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_window.q.out @@ -97,8 +97,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + 
featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby_mapjoin.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_mapjoin.q.out index b99a4ac..e6628ab 100644 --- a/ql/src/test/results/clientpositive/llap/vector_groupby_mapjoin.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_groupby_mapjoin.q.out @@ -61,8 +61,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -129,8 +128,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby_reduce.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_reduce.q.out index df9a46e..b2953fd 100644 --- a/ql/src/test/results/clientpositive/llap/vector_groupby_reduce.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_groupby_reduce.q.out @@ -293,8 +293,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -491,8 +491,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -784,8 +784,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1008,8 +1008,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby_rollup1.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_rollup1.q.out index 9d36c65..d1f8ac5 100644 --- a/ql/src/test/results/clientpositive/llap/vector_groupby_rollup1.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_groupby_rollup1.q.out @@ -95,8 +95,8 @@ STAGE PLANS: Map 
Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -351,8 +351,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -732,8 +732,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby_sort_11.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_sort_11.q.out index b772e9a..ab29314 100644 --- a/ql/src/test/results/clientpositive/llap/vector_groupby_sort_11.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_groupby_sort_11.q.out @@ -98,8 +98,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -521,8 +520,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -706,8 +704,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -887,8 +884,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby_sort_8.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_sort_8.q.out index 74ad6ae..5e946c4 100644 --- a/ql/src/test/results/clientpositive/llap/vector_groupby_sort_8.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_groupby_sort_8.q.out @@ -107,8 +107,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true 
inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_grouping_sets.q.out b/ql/src/test/results/clientpositive/llap/vector_grouping_sets.q.out index f355d4b..e0d533f 100644 --- a/ql/src/test/results/clientpositive/llap/vector_grouping_sets.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_grouping_sets.q.out @@ -190,8 +190,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -321,8 +321,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_if_expr.q.out b/ql/src/test/results/clientpositive/llap/vector_if_expr.q.out index d2edc1f..a88e385 100644 --- a/ql/src/test/results/clientpositive/llap/vector_if_expr.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_if_expr.q.out @@ -57,8 +57,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_if_expr_2.q.out b/ql/src/test/results/clientpositive/llap/vector_if_expr_2.q.out index f4baa69..ddcd2a0 100644 --- a/ql/src/test/results/clientpositive/llap/vector_if_expr_2.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_if_expr_2.q.out @@ -72,8 +72,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_include_no_sel.q.out b/ql/src/test/results/clientpositive/llap/vector_include_no_sel.q.out index 1e8a942..041990a 100644 --- a/ql/src/test/results/clientpositive/llap/vector_include_no_sel.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_include_no_sel.q.out @@ -211,8 +211,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -238,8 +238,8 @@ STAGE PLANS: Map Vectorization: enabled: true 
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_inner_join.q.out b/ql/src/test/results/clientpositive/llap/vector_inner_join.q.out index bb555df..fb3d7cb 100644 --- a/ql/src/test/results/clientpositive/llap/vector_inner_join.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_inner_join.q.out @@ -117,8 +117,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -168,8 +168,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -276,8 +276,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -339,8 +339,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -491,8 +491,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -543,8 +543,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -635,8 +635,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -704,8 +704,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + 
inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -822,8 +822,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -874,8 +874,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -991,8 +991,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1043,8 +1043,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1160,8 +1160,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1212,8 +1212,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1304,8 +1304,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1381,8 +1381,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1473,8 +1473,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: 
[DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1550,8 +1550,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_interval_1.q.out b/ql/src/test/results/clientpositive/llap/vector_interval_1.q.out index dcedca8..815b2a3 100644 --- a/ql/src/test/results/clientpositive/llap/vector_interval_1.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_interval_1.q.out @@ -112,8 +112,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -241,8 +241,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -378,8 +378,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -527,8 +527,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -687,8 +687,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -829,8 +829,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -953,8 +953,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1083,8 +1083,8 @@ STAGE 
PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_interval_2.q.out b/ql/src/test/results/clientpositive/llap/vector_interval_2.q.out index 2ee7502..1cd498f 100644 --- a/ql/src/test/results/clientpositive/llap/vector_interval_2.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_interval_2.q.out @@ -152,8 +152,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -358,8 +358,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -564,8 +564,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -770,8 +770,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -967,8 +967,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1161,8 +1161,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1345,8 +1345,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1529,8 +1529,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - 
featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1723,8 +1723,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1917,8 +1917,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_interval_arithmetic.q.out b/ql/src/test/results/clientpositive/llap/vector_interval_arithmetic.q.out index 87993d2..af5815f 100644 --- a/ql/src/test/results/clientpositive/llap/vector_interval_arithmetic.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_interval_arithmetic.q.out @@ -106,8 +106,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -288,8 +288,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -470,8 +470,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -654,8 +654,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -759,8 +759,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -943,8 +943,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] 
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1127,8 +1127,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1309,8 +1309,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_interval_mapjoin.q.out b/ql/src/test/results/clientpositive/llap/vector_interval_mapjoin.q.out index cfe3d5f..2b9fc34 100644 --- a/ql/src/test/results/clientpositive/llap/vector_interval_mapjoin.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_interval_mapjoin.q.out @@ -256,8 +256,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -299,8 +299,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_join30.q.out b/ql/src/test/results/clientpositive/llap/vector_join30.q.out index 9530804..5187574 100644 --- a/ql/src/test/results/clientpositive/llap/vector_join30.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_join30.q.out @@ -79,8 +79,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -146,8 +146,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -296,8 +296,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -332,8 +332,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -457,8 +457,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -517,8 +517,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -655,8 +655,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -728,8 +728,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -770,8 +770,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -907,8 +907,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -943,8 +943,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -978,8 +978,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1137,8 +1137,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - 
inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1173,8 +1173,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1208,8 +1208,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1367,8 +1367,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1403,8 +1403,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1438,8 +1438,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1597,8 +1597,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1633,8 +1633,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1668,8 +1668,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_left_outer_join.q.out b/ql/src/test/results/clientpositive/llap/vector_left_outer_join.q.out index 6201a48..3d78cfe 100644 --- 
a/ql/src/test/results/clientpositive/llap/vector_left_outer_join.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_left_outer_join.q.out @@ -75,8 +75,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -100,8 +100,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -125,8 +125,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_left_outer_join2.q.out b/ql/src/test/results/clientpositive/llap/vector_left_outer_join2.q.out index 25a644d..55be910 100644 --- a/ql/src/test/results/clientpositive/llap/vector_left_outer_join2.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_left_outer_join2.q.out @@ -340,8 +340,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -376,8 +376,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -482,8 +482,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -518,8 +518,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -621,8 +621,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false 
usesVectorUDFAdaptor: false @@ -657,8 +657,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -760,8 +760,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -796,8 +796,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_leftsemi_mapjoin.q.out b/ql/src/test/results/clientpositive/llap/vector_leftsemi_mapjoin.q.out index f1db9af..d739408 100644 --- a/ql/src/test/results/clientpositive/llap/vector_leftsemi_mapjoin.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_leftsemi_mapjoin.q.out @@ -3372,8 +3372,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3402,8 +3402,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3485,8 +3485,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3515,8 +3515,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3600,8 +3600,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3630,8 +3630,8 @@ STAGE PLANS: Map Vectorization: enabled: true 
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3710,8 +3710,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3740,8 +3740,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3828,8 +3828,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3858,8 +3858,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3941,8 +3941,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3971,8 +3971,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4054,8 +4054,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4084,8 +4084,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4164,8 +4164,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4191,8 +4191,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4273,8 +4273,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4303,8 +4303,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4399,8 +4399,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4429,8 +4429,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4513,8 +4513,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4535,8 +4535,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -4565,8 +4565,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4658,8 +4658,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - 
inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4688,8 +4688,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4779,8 +4779,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4809,8 +4809,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4839,8 +4839,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4929,8 +4929,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4948,8 +4948,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -4975,8 +4975,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5072,8 +5072,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -5091,8 +5091,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] 
+ inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -5118,8 +5118,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5219,8 +5219,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -5246,8 +5246,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5265,8 +5265,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -5366,8 +5366,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -5393,8 +5393,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5412,8 +5412,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -5515,8 +5515,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -5542,8 +5542,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + 
featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5561,8 +5561,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -5688,8 +5688,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5718,8 +5718,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5737,8 +5737,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -5852,8 +5852,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5882,8 +5882,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5972,8 +5972,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6035,8 +6035,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6174,8 +6174,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6237,8 +6237,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6378,8 +6378,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6441,8 +6441,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6582,8 +6582,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6645,8 +6645,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6789,8 +6789,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6852,8 +6852,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6992,8 +6992,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -7059,8 +7059,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat 
allNative: false usesVectorUDFAdaptor: false @@ -7199,8 +7199,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -7266,8 +7266,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -7403,8 +7403,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -7462,8 +7462,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -7600,8 +7600,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -7663,8 +7663,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -7815,8 +7815,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -7879,8 +7879,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -8028,8 +8028,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ 
-8072,8 +8072,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -8135,8 +8135,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -8284,8 +8284,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -8347,8 +8347,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -8497,8 +8497,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -8560,8 +8560,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -8623,8 +8623,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -8768,8 +8768,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -8804,8 +8804,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -8860,8 +8860,8 @@ STAGE PLANS: Map Vectorization: 
enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -8998,8 +8998,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -9034,8 +9034,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -9090,8 +9090,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -9248,8 +9248,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -9304,8 +9304,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -9340,8 +9340,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -9498,8 +9498,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -9554,8 +9554,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -9590,8 +9590,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -9750,8 +9750,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -9806,8 +9806,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -9842,8 +9842,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -10053,8 +10053,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -10116,8 +10116,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -10152,8 +10152,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -10329,8 +10329,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -10392,8 +10392,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -10492,8 +10492,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format 
IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -10555,8 +10555,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -10695,8 +10695,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -10758,8 +10758,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -10900,8 +10900,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -10963,8 +10963,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -11105,8 +11105,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -11168,8 +11168,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -11313,8 +11313,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -11376,8 +11376,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] 
- featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -11516,8 +11516,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -11584,8 +11584,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -11724,8 +11724,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -11792,8 +11792,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -11929,8 +11929,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -11989,8 +11989,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -12128,8 +12128,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -12191,8 +12191,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -12344,8 +12344,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + 
inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -12408,8 +12408,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -12557,8 +12557,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -12601,8 +12601,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -12664,8 +12664,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -12814,8 +12814,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -12877,8 +12877,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -13027,8 +13027,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -13090,8 +13090,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -13153,8 +13153,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: 
[DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -13298,8 +13298,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -13334,8 +13334,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -13390,8 +13390,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -13528,8 +13528,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -13564,8 +13564,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -13620,8 +13620,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -13778,8 +13778,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -13834,8 +13834,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -13870,8 +13870,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: 
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -14028,8 +14028,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -14084,8 +14084,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -14120,8 +14120,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -14280,8 +14280,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -14336,8 +14336,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -14372,8 +14372,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -14585,8 +14585,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -14648,8 +14648,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -14684,8 +14684,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -14862,8 +14862,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -14925,8 +14925,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -15025,8 +15025,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -15088,8 +15088,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -15228,8 +15228,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -15291,8 +15291,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -15433,8 +15433,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -15496,8 +15496,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -15638,8 +15638,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -15701,8 +15701,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -15846,8 +15846,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -15909,8 +15909,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -16049,8 +16049,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -16117,8 +16117,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -16257,8 +16257,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -16325,8 +16325,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -16462,8 +16462,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -16522,8 +16522,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -16661,8 +16661,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -16724,8 +16724,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -16877,8 +16877,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -16941,8 +16941,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -17090,8 +17090,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -17134,8 +17134,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -17197,8 +17197,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -17347,8 +17347,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -17410,8 +17410,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -17560,8 +17560,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -17623,8 +17623,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -17686,8 +17686,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -17831,8 +17831,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -17867,8 +17867,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -17923,8 +17923,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -18061,8 +18061,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -18097,8 +18097,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -18153,8 +18153,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -18311,8 +18311,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -18367,8 +18367,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -18403,8 +18403,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -18561,8 +18561,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -18617,8 +18617,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -18653,8 +18653,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -18813,8 +18813,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -18869,8 +18869,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -18905,8 +18905,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -19118,8 +19118,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -19181,8 +19181,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -19217,8 +19217,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -19395,8 +19395,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -19458,8 +19458,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_like_2.q.out b/ql/src/test/results/clientpositive/llap/vector_like_2.q.out
index 8e132a7..f3ec37a 100644
--- a/ql/src/test/results/clientpositive/llap/vector_like_2.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_like_2.q.out
@@ -74,8 +74,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_llap_text_1.q.out b/ql/src/test/results/clientpositive/llap/vector_llap_text_1.q.out
index a35b816..abddf5a 100644
--- a/ql/src/test/results/clientpositive/llap/vector_llap_text_1.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_llap_text_1.q.out
@@ -179,8 +179,7 @@ STAGE PLANS:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
           inputFormatFeatureSupport: [DECIMAL_64]
-          vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-          featureSupportInUse: []
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -259,8 +258,7 @@ STAGE PLANS:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
           inputFormatFeatureSupport: [DECIMAL_64]
-          vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-          featureSupportInUse: []
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_map_order.q.out b/ql/src/test/results/clientpositive/llap/vector_map_order.q.out
index 02fc5a0..238555c 100644
--- a/ql/src/test/results/clientpositive/llap/vector_map_order.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_map_order.q.out
@@ -75,8 +75,7 @@ STAGE PLANS:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
           inputFormatFeatureSupport: [DECIMAL_64]
-          vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-          featureSupportInUse: []
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_mapjoin_reduce.q.out b/ql/src/test/results/clientpositive/llap/vector_mapjoin_reduce.q.out
index 09a53d0..e0c7dfa 100644
--- a/ql/src/test/results/clientpositive/llap/vector_mapjoin_reduce.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_mapjoin_reduce.q.out
@@ -67,8 +67,7 @@ STAGE PLANS:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
           inputFormatFeatureSupport: [DECIMAL_64]
-          vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-          featureSupportInUse: []
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -111,8 +110,7 @@ STAGE PLANS:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
           inputFormatFeatureSupport: [DECIMAL_64]
-          vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-          featureSupportInUse: []
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -166,8 +164,7 @@ STAGE PLANS:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
           inputFormatFeatureSupport: [DECIMAL_64]
-          vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-          featureSupportInUse: []
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -339,8 +336,7 @@ STAGE PLANS:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
           inputFormatFeatureSupport: [DECIMAL_64]
-          vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-          featureSupportInUse: []
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -384,8 +380,7 @@ STAGE PLANS:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
           inputFormatFeatureSupport: [DECIMAL_64]
-          vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-          featureSupportInUse: []
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -440,8 +435,7 @@ STAGE PLANS:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
           inputFormatFeatureSupport: [DECIMAL_64]
-          vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-          featureSupportInUse: []
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_mr_diff_schema_alias.q.out b/ql/src/test/results/clientpositive/llap/vector_mr_diff_schema_alias.q.out
index 9b01d79..8d2ce82 100644
--- a/ql/src/test/results/clientpositive/llap/vector_mr_diff_schema_alias.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_mr_diff_schema_alias.q.out
@@ -300,8 +300,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -329,8 +329,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_multi_insert.q.out b/ql/src/test/results/clientpositive/llap/vector_multi_insert.q.out
index 4bf6a03..392c8f5 100644
--- a/ql/src/test/results/clientpositive/llap/vector_multi_insert.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_multi_insert.q.out
@@ -149,8 +149,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_null_map.q.out b/ql/src/test/results/clientpositive/llap/vector_null_map.q.out
index 666f7fd..5394cc6 100644
--- a/ql/src/test/results/clientpositive/llap/vector_null_map.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_null_map.q.out
@@ -76,8 +76,7 @@ STAGE PLANS:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
           inputFormatFeatureSupport: [DECIMAL_64]
-          vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-          featureSupportInUse: []
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -149,8 +148,7 @@ STAGE PLANS:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
           inputFormatFeatureSupport: [DECIMAL_64]
-          vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-          featureSupportInUse: []
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
           allNative: false
           usesVectorUDFAdaptor: true
diff --git a/ql/src/test/results/clientpositive/llap/vector_null_projection.q.out b/ql/src/test/results/clientpositive/llap/vector_null_projection.q.out
index 27eb15e..7bb01a6 100644
--- a/ql/src/test/results/clientpositive/llap/vector_null_projection.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_null_projection.q.out
@@ -83,8 +83,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_nullsafe_join.q.out b/ql/src/test/results/clientpositive/llap/vector_nullsafe_join.q.out
index 9801470..8a05290 100644
--- a/ql/src/test/results/clientpositive/llap/vector_nullsafe_join.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_nullsafe_join.q.out
@@ -94,8 +94,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -122,8 +122,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -231,8 +231,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -266,8 +266,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -301,8 +301,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -395,8 +395,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -423,8 +423,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -451,8 +451,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -579,8 +579,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -613,8 +613,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -647,8 +647,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -741,8 +741,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -768,8 +768,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -795,8 +795,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -987,8 +987,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -1015,8 +1015,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -1124,8 +1124,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -1159,8 +1159,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -1194,8 +1194,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -1288,8 +1288,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -1316,8 +1316,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -1344,8 +1344,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -1472,8 +1472,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -1506,8 +1506,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -1540,8 +1540,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -1634,8 +1634,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -1661,8 +1661,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -1688,8 +1688,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_number_compare_projection.q.out b/ql/src/test/results/clientpositive/llap/vector_number_compare_projection.q.out
index 9a7bd94..fc9c453 100644
--- a/ql/src/test/results/clientpositive/llap/vector_number_compare_projection.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_number_compare_projection.q.out
@@ -164,8 +164,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: true
@@ -290,8 +290,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: true
diff --git a/ql/src/test/results/clientpositive/llap/vector_nvl.q.out b/ql/src/test/results/clientpositive/llap/vector_nvl.q.out
index dbcb770..13ebb17 100644
--- a/ql/src/test/results/clientpositive/llap/vector_nvl.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_nvl.q.out
@@ -65,8 +65,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -160,8 +160,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -253,8 +253,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -346,8 +346,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_orc_merge_incompat_schema.q.out b/ql/src/test/results/clientpositive/llap/vector_orc_merge_incompat_schema.q.out
index f1a4ea3..90e2103 100644
--- a/ql/src/test/results/clientpositive/llap/vector_orc_merge_incompat_schema.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_orc_merge_incompat_schema.q.out
@@ -161,8 +161,7 @@ STAGE PLANS:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
           inputFormatFeatureSupport: [DECIMAL_64]
-          vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-          featureSupportInUse: []
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
           allNative: false
           usesVectorUDFAdaptor: true
@@ -245,8 +244,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: true
diff --git a/ql/src/test/results/clientpositive/llap/vector_orc_nested_column_pruning.q.out b/ql/src/test/results/clientpositive/llap/vector_orc_nested_column_pruning.q.out
index 74af8f8..797d994 100644
--- a/ql/src/test/results/clientpositive/llap/vector_orc_nested_column_pruning.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_orc_nested_column_pruning.q.out
@@ -161,8 +161,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -234,8 +234,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -307,8 +307,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -380,8 +380,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -453,8 +453,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -526,8 +526,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -606,8 +606,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -686,8 +686,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -766,8 +766,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: true
@@ -1000,8 +1000,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: true
@@ -1090,8 +1090,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -1415,8 +1415,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -1459,8 +1459,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -1577,8 +1577,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -1718,8 +1718,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -1774,8 +1774,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: true
@@ -1877,8 +1877,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -1911,8 +1911,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -2025,8 +2025,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -2070,8 +2070,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -2228,8 +2228,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -2367,8 +2367,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -2506,8 +2506,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -2746,8 +2746,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_orc_null_check.q.out b/ql/src/test/results/clientpositive/llap/vector_orc_null_check.q.out
index 79eaf98..989c88e 100644
--- a/ql/src/test/results/clientpositive/llap/vector_orc_null_check.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_orc_null_check.q.out
@@ -88,8 +88,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: true
diff --git a/ql/src/test/results/clientpositive/llap/vector_order_null.q.out b/ql/src/test/results/clientpositive/llap/vector_order_null.q.out
index 08c57bd..cb4053e 100644
--- a/ql/src/test/results/clientpositive/llap/vector_order_null.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_order_null.q.out
@@ -127,8 +127,7 @@ STAGE PLANS:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
           inputFormatFeatureSupport: [DECIMAL_64]
-          vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-          featureSupportInUse: []
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -250,8 +249,7 @@ STAGE PLANS:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
           inputFormatFeatureSupport: [DECIMAL_64]
-          vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-          featureSupportInUse: []
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -373,8 +371,7 @@ STAGE PLANS:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
           inputFormatFeatureSupport: [DECIMAL_64]
-          vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-          featureSupportInUse: []
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -496,8 +493,7 @@ STAGE PLANS:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
           inputFormatFeatureSupport: [DECIMAL_64]
-          vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-          featureSupportInUse: []
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
           allNative: true
           usesVectorUDFAdaptor: false
@@ -619,8 +615,7 @@ STAGE PLANS:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
           inputFormatFeatureSupport: [DECIMAL_64]
-              featureSupportInUse: []
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -742,8 +737,7 @@ STAGE PLANS:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
               inputFormatFeatureSupport: [DECIMAL_64]
-              vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-              featureSupportInUse: []
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -865,8 +859,7 @@ STAGE PLANS:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
               inputFormatFeatureSupport: [DECIMAL_64]
-              vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-              featureSupportInUse: []
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -988,8 +981,7 @@ STAGE PLANS:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
               inputFormatFeatureSupport: [DECIMAL_64]
-              vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-              featureSupportInUse: []
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -1111,8 +1103,7 @@ STAGE PLANS:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
               inputFormatFeatureSupport: [DECIMAL_64]
-              vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-              featureSupportInUse: []
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -1234,8 +1225,7 @@ STAGE PLANS:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
               inputFormatFeatureSupport: [DECIMAL_64]
-              vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-              featureSupportInUse: []
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -1357,8 +1347,7 @@ STAGE PLANS:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
               inputFormatFeatureSupport: [DECIMAL_64]
-              vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-              featureSupportInUse: []
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_orderby_5.q.out b/ql/src/test/results/clientpositive/llap/vector_orderby_5.q.out
index d3e10b0..0de0c33 100644
--- a/ql/src/test/results/clientpositive/llap/vector_orderby_5.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_orderby_5.q.out
@@ -168,8 +168,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: false
               usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_outer_join0.q.out b/ql/src/test/results/clientpositive/llap/vector_outer_join0.q.out
index 50e6a85..19e98f3 100644
--- a/ql/src/test/results/clientpositive/llap/vector_outer_join0.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_outer_join0.q.out
@@ -135,8 +135,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: false
               usesVectorUDFAdaptor: false
@@ -180,8 +180,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -270,8 +270,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -333,8 +333,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: false
               usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_outer_join1.q.out b/ql/src/test/results/clientpositive/llap/vector_outer_join1.q.out
index 0bce01d..c74a588 100644
--- a/ql/src/test/results/clientpositive/llap/vector_outer_join1.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_outer_join1.q.out
@@ -295,8 +295,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: false
               usesVectorUDFAdaptor: false
@@ -340,8 +340,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -469,8 +469,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: false
               usesVectorUDFAdaptor: false
@@ -513,8 +513,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -765,8 +765,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: false
               usesVectorUDFAdaptor: false
@@ -809,8 +809,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -853,8 +853,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_outer_join2.q.out b/ql/src/test/results/clientpositive/llap/vector_outer_join2.q.out
index c2dc2b3..2e90aae 100644
--- a/ql/src/test/results/clientpositive/llap/vector_outer_join2.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_outer_join2.q.out
@@ -340,8 +340,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: false
               usesVectorUDFAdaptor: false
@@ -384,8 +384,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -428,8 +428,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_outer_reference_windowed.q.out b/ql/src/test/results/clientpositive/llap/vector_outer_reference_windowed.q.out
index 89c14d5..2b0a1e7 100644
--- a/ql/src/test/results/clientpositive/llap/vector_outer_reference_windowed.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_outer_reference_windowed.q.out
@@ -271,7 +271,7 @@ STAGE PLANS:
                   Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE
                   TableScan Vectorization:
                       native: true
-                      vectorizationSchemaColumns: [0:c1:decimal(15,2), 1:c2:decimal(15,2), 2:ROW__ID:struct]
+                      vectorizationSchemaColumns: [0:c1:decimal(15,2)/DECIMAL_64, 1:c2:decimal(15,2)/DECIMAL_64, 2:ROW__ID:struct]
                   Select Operator
                     expressions: c1 (type: decimal(15,2))
                     outputColumnNames: c1
@@ -283,7 +283,7 @@ STAGE PLANS:
                     Group By Operator
                       aggregations: sum(c1)
                       Group By Vectorization:
-                          aggregators: VectorUDAFSumDecimal(col 0:decimal(15,2)) -> decimal(25,2)
+                          aggregators: VectorUDAFSumDecimal64ToDecimal(col 0:decimal(15,2)/DECIMAL_64) -> decimal(25,2)
                           className: VectorGroupByOperator
                           groupByMode: HASH
                           native: false
@@ -308,8 +308,7 @@ STAGE PLANS:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
               inputFormatFeatureSupport: [DECIMAL_64]
-              vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-              featureSupportInUse: []
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
               allNative: false
               usesVectorUDFAdaptor: false
@@ -317,7 +316,7 @@ STAGE PLANS:
               rowBatchContext:
                   dataColumnCount: 2
                   includeColumns: [0]
-                  dataColumns: c1:decimal(15,2), c2:decimal(15,2)
+                  dataColumns: c1:decimal(15,2)/DECIMAL_64, c2:decimal(15,2)/DECIMAL_64
                   partitionColumnCount: 0
                   scratchColumnTypeNames: []
         Reducer 2
@@ -489,7 +488,7 @@ STAGE PLANS:
                   Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE
                   TableScan Vectorization:
                       native: true
-                      vectorizationSchemaColumns: [0:c1:decimal(15,2), 1:c2:decimal(15,2), 2:ROW__ID:struct]
+                      vectorizationSchemaColumns: [0:c1:decimal(15,2)/DECIMAL_64, 1:c2:decimal(15,2)/DECIMAL_64, 2:ROW__ID:struct]
                   Select Operator
                     expressions: c1 (type: decimal(15,2)), c2 (type: decimal(15,2))
                     outputColumnNames: c1, c2
@@ -501,10 +500,10 @@ STAGE PLANS:
                     Group By Operator
                       aggregations: sum(c1)
                       Group By Vectorization:
-                          aggregators: VectorUDAFSumDecimal(col 0:decimal(15,2)) -> decimal(25,2)
+                          aggregators: VectorUDAFSumDecimal64ToDecimal(col 0:decimal(15,2)/DECIMAL_64) -> decimal(25,2)
                           className: VectorGroupByOperator
                           groupByMode: HASH
-                          keyExpressions: col 0:decimal(15,2), col 1:decimal(15,2)
+                          keyExpressions: ConvertDecimal64ToDecimal(col 0:decimal(15,2)/DECIMAL_64) -> 3:decimal(15,2), ConvertDecimal64ToDecimal(col 1:decimal(15,2)/DECIMAL_64) -> 4:decimal(15,2)
                           native: false
                           vectorProcessingMode: HASH
                           projectedOutputColumnNums: [0]
@@ -530,8 +529,7 @@ STAGE PLANS:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
               inputFormatFeatureSupport: [DECIMAL_64]
-              vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-              featureSupportInUse: []
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
               allNative: false
               usesVectorUDFAdaptor: false
@@ -539,9 +537,9 @@ STAGE PLANS:
               rowBatchContext:
                   dataColumnCount: 2
                   includeColumns: [0, 1]
-                  dataColumns: c1:decimal(15,2), c2:decimal(15,2)
+                  dataColumns: c1:decimal(15,2)/DECIMAL_64, c2:decimal(15,2)/DECIMAL_64
                   partitionColumnCount: 0
-                  scratchColumnTypeNames: []
+                  scratchColumnTypeNames: [decimal(15,2), decimal(15,2)]
         Reducer 2
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -726,12 +724,12 @@ STAGE PLANS:
                   Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE
                   TableScan Vectorization:
                       native: true
-                      vectorizationSchemaColumns: [0:c1:decimal(15,2), 1:c2:decimal(15,2), 2:ROW__ID:struct]
+                      vectorizationSchemaColumns: [0:c1:decimal(15,2)/DECIMAL_64, 1:c2:decimal(15,2)/DECIMAL_64, 2:ROW__ID:struct]
                   Filter Operator
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: SelectColumnIsNotNull(col 0:decimal(15,2))
+                        predicateExpression: SelectColumnIsNotNull(col 3:decimal(15,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,2)/DECIMAL_64) -> 3:decimal(15,2))
                    predicate: c1 is not null (type: boolean)
                    Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE
                    Select Operator
@@ -760,8 +758,7 @@ STAGE PLANS:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
               inputFormatFeatureSupport: [DECIMAL_64]
-              vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-              featureSupportInUse: []
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -769,9 +766,9 @@ STAGE PLANS:
               rowBatchContext:
                   dataColumnCount: 2
                   includeColumns: [0, 1]
-                  dataColumns: c1:decimal(15,2), c2:decimal(15,2)
+                  dataColumns: c1:decimal(15,2)/DECIMAL_64, c2:decimal(15,2)/DECIMAL_64
                   partitionColumnCount: 0
-                  scratchColumnTypeNames: []
+                  scratchColumnTypeNames: [decimal(15,2)]
        Map 5
            Map Operator Tree:
                TableScan
@@ -779,12 +776,12 @@ STAGE PLANS:
                   Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE
                   TableScan Vectorization:
                       native: true
-                      vectorizationSchemaColumns: [0:c1:decimal(15,2), 1:c2:decimal(15,2), 2:ROW__ID:struct]
+                      vectorizationSchemaColumns: [0:c1:decimal(15,2)/DECIMAL_64, 1:c2:decimal(15,2)/DECIMAL_64, 2:ROW__ID:struct]
                   Filter Operator
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: SelectColumnIsNotNull(col 0:decimal(15,2))
+                        predicateExpression: SelectColumnIsNotNull(col 3:decimal(15,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,2)/DECIMAL_64) -> 3:decimal(15,2))
                    predicate: c1 is not null (type: boolean)
                    Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE
                    Select Operator
@@ -812,8 +809,7 @@ STAGE PLANS:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
               inputFormatFeatureSupport: [DECIMAL_64]
-              vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-              featureSupportInUse: []
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -821,9 +817,9 @@ STAGE PLANS:
               rowBatchContext:
                   dataColumnCount: 2
                   includeColumns: [0]
-                  dataColumns: c1:decimal(15,2), c2:decimal(15,2)
+                  dataColumns: c1:decimal(15,2)/DECIMAL_64, c2:decimal(15,2)/DECIMAL_64
                   partitionColumnCount: 0
-                  scratchColumnTypeNames: []
+                  scratchColumnTypeNames: [decimal(15,2)]
        Reducer 2
            Execution mode: llap
            Reduce Operator Tree:
@@ -1035,12 +1031,12 @@ STAGE PLANS:
                   Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE
                   TableScan Vectorization:
                       native: true
-                      vectorizationSchemaColumns: [0:c1:decimal(15,2), 1:c2:decimal(15,2), 2:ROW__ID:struct]
+                      vectorizationSchemaColumns: [0:c1:decimal(15,2)/DECIMAL_64, 1:c2:decimal(15,2)/DECIMAL_64, 2:ROW__ID:struct]
                   Filter Operator
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: SelectColumnIsNotNull(col 0:decimal(15,2))
+                        predicateExpression: SelectColumnIsNotNull(col 3:decimal(15,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,2)/DECIMAL_64) -> 3:decimal(15,2))
                    predicate: c1 is not null (type: boolean)
                    Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE
                    Select Operator
@@ -1068,8 +1064,7 @@ STAGE PLANS:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
               inputFormatFeatureSupport: [DECIMAL_64]
-              vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-              featureSupportInUse: []
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -1077,9 +1072,9 @@ STAGE PLANS:
               rowBatchContext:
                   dataColumnCount: 2
                   includeColumns: [0]
-                  dataColumns: c1:decimal(15,2), c2:decimal(15,2)
+                  dataColumns: c1:decimal(15,2)/DECIMAL_64, c2:decimal(15,2)/DECIMAL_64
                   partitionColumnCount: 0
-                  scratchColumnTypeNames: []
+                  scratchColumnTypeNames: [decimal(15,2)]
        Map 5
            Map Operator Tree:
                TableScan
@@ -1087,12 +1082,12 @@ STAGE PLANS:
                   Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE
                   TableScan Vectorization:
                       native: true
-                      vectorizationSchemaColumns: [0:c1:decimal(15,2), 1:c2:decimal(15,2), 2:ROW__ID:struct]
+                      vectorizationSchemaColumns: [0:c1:decimal(15,2)/DECIMAL_64, 1:c2:decimal(15,2)/DECIMAL_64, 2:ROW__ID:struct]
                   Filter Operator
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: SelectColumnIsNotNull(col 0:decimal(15,2))
+                        predicateExpression: SelectColumnIsNotNull(col 3:decimal(15,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,2)/DECIMAL_64) -> 3:decimal(15,2))
                    predicate: c1 is not null (type: boolean)
                    Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE
                    Select Operator
@@ -1121,8 +1116,7 @@ STAGE PLANS:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
               inputFormatFeatureSupport: [DECIMAL_64]
-              vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-              featureSupportInUse: []
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -1130,9 +1124,9 @@ STAGE PLANS:
               rowBatchContext:
                   dataColumnCount: 2
                   includeColumns: [0, 1]
-                  dataColumns: c1:decimal(15,2), c2:decimal(15,2)
+                  dataColumns: c1:decimal(15,2)/DECIMAL_64, c2:decimal(15,2)/DECIMAL_64
                   partitionColumnCount: 0
-                  scratchColumnTypeNames: []
+                  scratchColumnTypeNames: [decimal(15,2)]
        Reducer 2
            Execution mode: llap
            Reduce Operator Tree:
@@ -1343,12 +1337,12 @@ STAGE PLANS:
                   Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE
                   TableScan Vectorization:
                       native: true
-                      vectorizationSchemaColumns: [0:c1:decimal(15,2), 1:c2:decimal(15,2), 2:ROW__ID:struct]
+                      vectorizationSchemaColumns: [0:c1:decimal(15,2)/DECIMAL_64, 1:c2:decimal(15,2)/DECIMAL_64, 2:ROW__ID:struct]
                   Filter Operator
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: SelectColumnIsNotNull(col 0:decimal(15,2))
+                        predicateExpression: SelectColumnIsNotNull(col 3:decimal(15,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,2)/DECIMAL_64) -> 3:decimal(15,2))
                    predicate: c1 is not null (type: boolean)
                    Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE
                    Select Operator
@@ -1377,8 +1371,7 @@ STAGE PLANS:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
               inputFormatFeatureSupport: [DECIMAL_64]
-              vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-              featureSupportInUse: []
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -1386,9 +1379,9 @@ STAGE PLANS:
               rowBatchContext:
                   dataColumnCount: 2
                   includeColumns: [0, 1]
-                  dataColumns: c1:decimal(15,2), c2:decimal(15,2)
+                  dataColumns: c1:decimal(15,2)/DECIMAL_64, c2:decimal(15,2)/DECIMAL_64
                   partitionColumnCount: 0
-                  scratchColumnTypeNames: []
+                  scratchColumnTypeNames: [decimal(15,2)]
        Map 4
            Map Operator Tree:
                TableScan
@@ -1396,12 +1389,12 @@ STAGE PLANS:
                   Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE
                   TableScan Vectorization:
                       native: true
-                      vectorizationSchemaColumns: [0:c1:decimal(15,2), 1:c2:decimal(15,2), 2:ROW__ID:struct]
+                      vectorizationSchemaColumns: [0:c1:decimal(15,2)/DECIMAL_64, 1:c2:decimal(15,2)/DECIMAL_64, 2:ROW__ID:struct]
                   Filter Operator
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: SelectColumnIsNotNull(col 0:decimal(15,2))
+                        predicateExpression: SelectColumnIsNotNull(col 3:decimal(15,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,2)/DECIMAL_64) -> 3:decimal(15,2))
                    predicate: c1 is not null (type: boolean)
                    Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE
                    Select Operator
@@ -1430,8 +1423,7 @@ STAGE PLANS:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
               inputFormatFeatureSupport: [DECIMAL_64]
-              vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-              featureSupportInUse: []
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -1439,9 +1431,9 @@ STAGE PLANS:
               rowBatchContext:
                   dataColumnCount: 2
                   includeColumns: [0, 1]
-                  dataColumns: c1:decimal(15,2), c2:decimal(15,2)
+                  dataColumns: c1:decimal(15,2)/DECIMAL_64, c2:decimal(15,2)/DECIMAL_64
                   partitionColumnCount: 0
-                  scratchColumnTypeNames: []
+                  scratchColumnTypeNames: [decimal(15,2)]
        Reducer 2
            Execution mode: llap
            Reduce Operator Tree:
@@ -1571,7 +1563,7 @@ STAGE PLANS:
                   Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE
                   TableScan Vectorization:
                       native: true
-                      vectorizationSchemaColumns: [0:c1:decimal(7,2), 1:c2:decimal(7,2), 2:ROW__ID:struct]
+                      vectorizationSchemaColumns: [0:c1:decimal(7,2)/DECIMAL_64, 1:c2:decimal(7,2)/DECIMAL_64, 2:ROW__ID:struct]
                   Select Operator
                     expressions: c1 (type: decimal(7,2))
                     outputColumnNames: c1
@@ -1583,7 +1575,7 @@ STAGE PLANS:
                     Group By Operator
                       aggregations: sum(c1)
                       Group By Vectorization:
-                          aggregators: VectorUDAFSumDecimal(col 0:decimal(7,2)) -> decimal(17,2)
+                          aggregators: VectorUDAFSumDecimal64(col 0:decimal(7,2)/DECIMAL_64) -> decimal(17,2)/DECIMAL_64
                           className: VectorGroupByOperator
                           groupByMode: HASH
                           native: false
@@ -1608,8 +1600,7 @@ STAGE PLANS:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
               inputFormatFeatureSupport: [DECIMAL_64]
-              vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-              featureSupportInUse: []
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
               allNative: false
               usesVectorUDFAdaptor: false
@@ -1617,7 +1608,7 @@ STAGE PLANS:
               rowBatchContext:
                   dataColumnCount: 2
                   includeColumns: [0]
-                  dataColumns: c1:decimal(7,2), c2:decimal(7,2)
+                  dataColumns: c1:decimal(7,2)/DECIMAL_64, c2:decimal(7,2)/DECIMAL_64
                   partitionColumnCount: 0
                   scratchColumnTypeNames: []
         Reducer 2
@@ -1789,7 +1780,7 @@ STAGE PLANS:
                   Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE
                   TableScan Vectorization:
                       native: true
-                      vectorizationSchemaColumns: [0:c1:decimal(7,2), 1:c2:decimal(7,2), 2:ROW__ID:struct]
+                      vectorizationSchemaColumns: [0:c1:decimal(7,2)/DECIMAL_64, 1:c2:decimal(7,2)/DECIMAL_64, 2:ROW__ID:struct]
                   Select Operator
                     expressions: c1 (type: decimal(7,2)), c2 (type: decimal(7,2))
                     outputColumnNames: c1, c2
@@ -1801,10 +1792,10 @@ STAGE PLANS:
                     Group By Operator
                       aggregations: sum(c1)
                       Group By Vectorization:
-                          aggregators: VectorUDAFSumDecimal(col 0:decimal(7,2)) -> decimal(17,2)
+                          aggregators: VectorUDAFSumDecimal64(col 0:decimal(7,2)/DECIMAL_64) -> decimal(17,2)/DECIMAL_64
                           className: VectorGroupByOperator
                           groupByMode: HASH
-                          keyExpressions: col 0:decimal(7,2), col 1:decimal(7,2)
+                          keyExpressions: ConvertDecimal64ToDecimal(col 0:decimal(7,2)/DECIMAL_64) -> 3:decimal(7,2), ConvertDecimal64ToDecimal(col 1:decimal(7,2)/DECIMAL_64) -> 4:decimal(7,2)
                           native: false
                           vectorProcessingMode: HASH
                           projectedOutputColumnNums: [0]
@@ -1830,8 +1821,7 @@ STAGE PLANS:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
               inputFormatFeatureSupport: [DECIMAL_64]
-              vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-              featureSupportInUse: []
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
               allNative: false
               usesVectorUDFAdaptor: false
@@ -1839,9 +1829,9 @@ STAGE PLANS:
               rowBatchContext:
                   dataColumnCount: 2
                   includeColumns: [0, 1]
-                  dataColumns: c1:decimal(7,2), c2:decimal(7,2)
+                  dataColumns: c1:decimal(7,2)/DECIMAL_64, c2:decimal(7,2)/DECIMAL_64
                   partitionColumnCount: 0
-                  scratchColumnTypeNames: []
+                  scratchColumnTypeNames: [decimal(7,2), decimal(7,2)]
        Reducer 2
            Execution mode: vectorized, llap
            Reduce Vectorization:
@@ -2026,12 +2016,12 @@ STAGE PLANS:
                   Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE
                   TableScan Vectorization:
                       native: true
-                      vectorizationSchemaColumns: [0:c1:decimal(7,2), 1:c2:decimal(7,2), 2:ROW__ID:struct]
+                      vectorizationSchemaColumns: [0:c1:decimal(7,2)/DECIMAL_64, 1:c2:decimal(7,2)/DECIMAL_64, 2:ROW__ID:struct]
                   Filter Operator
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: SelectColumnIsNotNull(col 0:decimal(7,2))
+                        predicateExpression: SelectColumnIsNotNull(col 3:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(7,2)/DECIMAL_64) -> 3:decimal(7,2))
                    predicate: c1 is not null (type: boolean)
                    Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE
                    Select Operator
@@ -2060,8 +2050,7 @@ STAGE PLANS:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
               inputFormatFeatureSupport: [DECIMAL_64]
-              vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-              featureSupportInUse: []
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -2069,9 +2058,9 @@ STAGE PLANS:
               rowBatchContext:
                   dataColumnCount: 2
                   includeColumns: [0, 1]
-                  dataColumns: c1:decimal(7,2), c2:decimal(7,2)
+                  dataColumns: c1:decimal(7,2)/DECIMAL_64, c2:decimal(7,2)/DECIMAL_64
                   partitionColumnCount: 0
-                  scratchColumnTypeNames: []
+                  scratchColumnTypeNames: [decimal(7,2)]
        Map 5
            Map Operator Tree:
                TableScan
@@ -2079,12 +2068,12 @@ STAGE PLANS:
                   Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE
                   TableScan Vectorization:
                       native: true
-                      vectorizationSchemaColumns: [0:c1:decimal(7,2), 1:c2:decimal(7,2), 2:ROW__ID:struct]
+                      vectorizationSchemaColumns: [0:c1:decimal(7,2)/DECIMAL_64, 1:c2:decimal(7,2)/DECIMAL_64, 2:ROW__ID:struct]
                   Filter Operator
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: SelectColumnIsNotNull(col 0:decimal(7,2))
+                        predicateExpression: SelectColumnIsNotNull(col 3:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(7,2)/DECIMAL_64) -> 3:decimal(7,2))
                    predicate: c1 is not null (type: boolean)
                    Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE
                    Select Operator
@@ -2112,8 +2101,7 @@ STAGE PLANS:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
               inputFormatFeatureSupport: [DECIMAL_64]
-              vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-              featureSupportInUse: []
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -2121,9 +2109,9 @@ STAGE PLANS:
               rowBatchContext:
                   dataColumnCount: 2
                   includeColumns: [0]
-                  dataColumns: c1:decimal(7,2), c2:decimal(7,2)
+                  dataColumns: c1:decimal(7,2)/DECIMAL_64, c2:decimal(7,2)/DECIMAL_64
                   partitionColumnCount: 0
-                  scratchColumnTypeNames: []
+                  scratchColumnTypeNames: [decimal(7,2)]
        Reducer 2
            Execution mode: llap
            Reduce Operator Tree:
@@ -2335,12 +2323,12 @@ STAGE PLANS:
                   Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE
                   TableScan Vectorization:
                       native: true
-                      vectorizationSchemaColumns: [0:c1:decimal(7,2), 1:c2:decimal(7,2), 2:ROW__ID:struct]
+                      vectorizationSchemaColumns: [0:c1:decimal(7,2)/DECIMAL_64, 1:c2:decimal(7,2)/DECIMAL_64, 2:ROW__ID:struct]
                   Filter Operator
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: SelectColumnIsNotNull(col 0:decimal(7,2))
+                        predicateExpression: SelectColumnIsNotNull(col 3:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(7,2)/DECIMAL_64) -> 3:decimal(7,2))
                    predicate: c1 is not null (type: boolean)
                    Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE
                    Select Operator
@@ -2368,8 +2356,7 @@ STAGE PLANS:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
               inputFormatFeatureSupport: [DECIMAL_64]
-              vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-              featureSupportInUse: []
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -2377,9 +2364,9 @@ STAGE PLANS:
               rowBatchContext:
                   dataColumnCount: 2
                   includeColumns: [0]
-                  dataColumns: c1:decimal(7,2), c2:decimal(7,2)
+                  dataColumns: c1:decimal(7,2)/DECIMAL_64, c2:decimal(7,2)/DECIMAL_64
                   partitionColumnCount: 0
-                  scratchColumnTypeNames: []
+                  scratchColumnTypeNames: [decimal(7,2)]
        Map 5
            Map Operator Tree:
                TableScan
@@ -2387,12 +2374,12 @@ STAGE PLANS:
                   Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE
                   TableScan Vectorization:
                       native: true
-                      vectorizationSchemaColumns: [0:c1:decimal(7,2), 1:c2:decimal(7,2), 2:ROW__ID:struct]
+                      vectorizationSchemaColumns: [0:c1:decimal(7,2)/DECIMAL_64, 1:c2:decimal(7,2)/DECIMAL_64, 2:ROW__ID:struct]
                   Filter Operator
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: SelectColumnIsNotNull(col 0:decimal(7,2))
+                        predicateExpression: SelectColumnIsNotNull(col 3:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(7,2)/DECIMAL_64) -> 3:decimal(7,2))
                    predicate: c1 is not null (type: boolean)
                    Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE
                    Select Operator
@@ -2421,8 +2408,7 @@ STAGE PLANS:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
               inputFormatFeatureSupport: [DECIMAL_64]
-              vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-              featureSupportInUse: []
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -2430,9 +2416,9 @@ STAGE PLANS:
               rowBatchContext:
                   dataColumnCount: 2
                   includeColumns: [0, 1]
-                  dataColumns: c1:decimal(7,2), c2:decimal(7,2)
+                  dataColumns: c1:decimal(7,2)/DECIMAL_64, c2:decimal(7,2)/DECIMAL_64
                   partitionColumnCount: 0
-                  scratchColumnTypeNames: []
+                  scratchColumnTypeNames: [decimal(7,2)]
        Reducer 2
            Execution mode: llap
            Reduce Operator Tree:
@@ -2643,12 +2629,12 @@ STAGE PLANS:
                   Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE
                   TableScan Vectorization:
                       native: true
-                      vectorizationSchemaColumns: [0:c1:decimal(7,2), 1:c2:decimal(7,2), 2:ROW__ID:struct]
+                      vectorizationSchemaColumns: [0:c1:decimal(7,2)/DECIMAL_64, 1:c2:decimal(7,2)/DECIMAL_64, 2:ROW__ID:struct]
                   Filter Operator
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: SelectColumnIsNotNull(col 0:decimal(7,2))
+                        predicateExpression: SelectColumnIsNotNull(col 3:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(7,2)/DECIMAL_64) -> 3:decimal(7,2))
                    predicate: c1 is not null (type: boolean)
                    Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE
                    Select Operator
@@ -2677,8 +2663,7 @@ STAGE PLANS:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
               inputFormatFeatureSupport: [DECIMAL_64]
-              vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-              featureSupportInUse: []
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -2686,9 +2671,9 @@ STAGE PLANS:
               rowBatchContext:
                   dataColumnCount: 2
                   includeColumns: [0, 1]
-                  dataColumns: c1:decimal(7,2), c2:decimal(7,2)
+                  dataColumns: c1:decimal(7,2)/DECIMAL_64, c2:decimal(7,2)/DECIMAL_64
                   partitionColumnCount: 0
-                  scratchColumnTypeNames: []
+                  scratchColumnTypeNames: [decimal(7,2)]
        Map 4
            Map Operator Tree:
                TableScan
@@ -2696,12 +2681,12 @@ STAGE PLANS:
                   Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE
                   TableScan Vectorization:
                       native: true
-                      vectorizationSchemaColumns: [0:c1:decimal(7,2), 1:c2:decimal(7,2), 2:ROW__ID:struct]
+                      vectorizationSchemaColumns: [0:c1:decimal(7,2)/DECIMAL_64, 1:c2:decimal(7,2)/DECIMAL_64, 2:ROW__ID:struct]
                   Filter Operator
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: SelectColumnIsNotNull(col 0:decimal(7,2))
+                        predicateExpression: SelectColumnIsNotNull(col 3:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(7,2)/DECIMAL_64) -> 3:decimal(7,2))
                    predicate: c1 is not null (type: boolean)
                    Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE
                    Select Operator
@@ -2730,8 +2715,7 @@ STAGE PLANS:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
               inputFormatFeatureSupport: [DECIMAL_64]
-              vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-              featureSupportInUse: []
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -2739,9 +2723,9 @@ STAGE PLANS:
               rowBatchContext:
                   dataColumnCount: 2
                   includeColumns: [0, 1]
-                  dataColumns: c1:decimal(7,2), c2:decimal(7,2)
+                  dataColumns: c1:decimal(7,2)/DECIMAL_64, c2:decimal(7,2)/DECIMAL_64
                   partitionColumnCount: 0
-                  scratchColumnTypeNames: []
+                  scratchColumnTypeNames: [decimal(7,2)]
        Reducer 2
            Execution mode: llap
            Reduce Operator Tree:
diff --git a/ql/src/test/results/clientpositive/llap/vector_partition_diff_num_cols.q.out b/ql/src/test/results/clientpositive/llap/vector_partition_diff_num_cols.q.out
index 068453f..bf8e2d8 100644
--- a/ql/src/test/results/clientpositive/llap/vector_partition_diff_num_cols.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_partition_diff_num_cols.q.out
@@ -134,8 +134,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: false
               usesVectorUDFAdaptor: false
@@ -304,8 +304,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: false
               usesVectorUDFAdaptor: false
@@ -474,8 +474,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: false
               usesVectorUDFAdaptor: false
@@ -631,8 +631,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: false
               usesVectorUDFAdaptor: false
@@ -788,8 +788,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: false
               usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_partitioned_date_time.q.out b/ql/src/test/results/clientpositive/llap/vector_partitioned_date_time.q.out
index 1de9ed4..1bbb9d0 100644
--- a/ql/src/test/results/clientpositive/llap/vector_partitioned_date_time.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_partitioned_date_time.q.out
@@ -297,8 +297,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -475,8 +475,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: false
               usesVectorUDFAdaptor: false
@@ -969,8 +969,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: false
               usesVectorUDFAdaptor: false
@@ -1204,8 +1204,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -1406,8 +1406,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: false
               usesVectorUDFAdaptor: false
@@ -1924,8 +1924,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: false
               usesVectorUDFAdaptor: false
@@ -2159,8 +2159,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -2361,8 +2361,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: false
               usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_ptf_1.q.out b/ql/src/test/results/clientpositive/llap/vector_ptf_1.q.out
index 568549d..ef4934e 100644
--- a/ql/src/test/results/clientpositive/llap/vector_ptf_1.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_ptf_1.q.out
@@ -111,8 +111,7 @@ STAGE PLANS:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
               inputFormatFeatureSupport: [DECIMAL_64]
-              vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-              featureSupportInUse: []
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
               allNative: false
               usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_ptf_part_simple.q.out b/ql/src/test/results/clientpositive/llap/vector_ptf_part_simple.q.out
index ec8611d..2471c5d 100644
--- a/ql/src/test/results/clientpositive/llap/vector_ptf_part_simple.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_ptf_part_simple.q.out
@@ -153,8 +153,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -413,8 +413,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -642,8 +642,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -872,8 +872,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -1134,8 +1134,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -1364,8 +1364,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -1595,8 +1595,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -1858,8 +1858,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -2089,8 +2089,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -2312,8 +2312,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -2540,8 +2540,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -2768,8 +2768,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -2966,8 +2966,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -3196,8 +3196,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -3426,8 +3426,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -3625,8 +3625,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -3856,8 +3856,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -4087,8 +4087,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -4326,8 +4326,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -4555,8 +4555,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -4804,8 +4804,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -5033,8 +5033,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -5256,8 +5256,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -5456,8 +5456,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -5657,8 +5657,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -5827,8 +5827,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -6157,8 +6157,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -6357,8 +6357,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -6559,8 +6559,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
@@ -6760,8 +6760,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_reduce1.q.out b/ql/src/test/results/clientpositive/llap/vector_reduce1.q.out
index 0a3127c..a3ad696 100644
--- a/ql/src/test/results/clientpositive/llap/vector_reduce1.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_reduce1.q.out
@@ -151,8 +151,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_reduce2.q.out b/ql/src/test/results/clientpositive/llap/vector_reduce2.q.out
index afe443b..8ff51ac 100644
--- a/ql/src/test/results/clientpositive/llap/vector_reduce2.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_reduce2.q.out
@@ -151,8 +151,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_reduce3.q.out b/ql/src/test/results/clientpositive/llap/vector_reduce3.q.out
index f0faa56..e26c8b2 100644
--- a/ql/src/test/results/clientpositive/llap/vector_reduce3.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_reduce3.q.out
@@ -151,8 +151,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: true
               usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_reduce_groupby_decimal.q.out b/ql/src/test/results/clientpositive/llap/vector_reduce_groupby_decimal.q.out
index c92c5cd..b241f30 100644
--- a/ql/src/test/results/clientpositive/llap/vector_reduce_groupby_decimal.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_reduce_groupby_decimal.q.out
@@ -89,8 +89,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: false
               usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_reduce_groupby_duplicate_cols.q.out b/ql/src/test/results/clientpositive/llap/vector_reduce_groupby_duplicate_cols.q.out
index 1ed694d..bda96da 100644
--- a/ql/src/test/results/clientpositive/llap/vector_reduce_groupby_duplicate_cols.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_reduce_groupby_duplicate_cols.q.out
@@ -128,8 +128,7 @@ STAGE PLANS:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
               inputFormatFeatureSupport: [DECIMAL_64]
-              vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-              featureSupportInUse: []
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
               allNative: false
               usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_retry_failure.q.out b/ql/src/test/results/clientpositive/llap/vector_retry_failure.q.out
index 64e158e..c2342b2 100644
--- a/ql/src/test/results/clientpositive/llap/vector_retry_failure.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_retry_failure.q.out
@@ -80,8 +80,7 @@ STAGE PLANS:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
               inputFormatFeatureSupport: [DECIMAL_64]
-              vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-              featureSupportInUse: []
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
               allNative: false
               usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_reuse_scratchcols.q.out b/ql/src/test/results/clientpositive/llap/vector_reuse_scratchcols.q.out
index 8fb0752..c95d08a 100644
--- a/ql/src/test/results/clientpositive/llap/vector_reuse_scratchcols.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_reuse_scratchcols.q.out
@@ -139,8 +139,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: false
               usesVectorUDFAdaptor: false
@@ -346,8 +346,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: false
               usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_string_concat.q.out b/ql/src/test/results/clientpositive/llap/vector_string_concat.q.out
index 284b57f..38d9172 100644
--- a/ql/src/test/results/clientpositive/llap/vector_string_concat.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_string_concat.q.out
@@ -155,8 +155,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
               enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-              inputFormatFeatureSupport: []
-              featureSupportInUse: []
+              inputFormatFeatureSupport: [DECIMAL_64]
+              featureSupportInUse: [DECIMAL_64]
               inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
               allNative: false
               usesVectorUDFAdaptor: false
@@ -379,8 +379,8 @@ STAGE PLANS:
           Map Vectorization:
               enabled: true
hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_string_decimal.q.out b/ql/src/test/results/clientpositive/llap/vector_string_decimal.q.out index 8e55ed3..ff84fe8 100644 --- a/ql/src/test/results/clientpositive/llap/vector_string_decimal.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_string_decimal.q.out @@ -70,7 +70,7 @@ STAGE PLANS: Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterDoubleColumnInList(col 2:double, values [1.0E8, 2.0E8])(children: CastDecimalToDouble(col 0:decimal(18,0)) -> 2:double) + predicateExpression: FilterDoubleColumnInList(col 3:double, values [1.0E8, 2.0E8])(children: CastDecimalToDouble(col 2:decimal(18,0))(children: ConvertDecimal64ToDecimal(col 0:decimal(18,0)/DECIMAL_64) -> 2:decimal(18,0)) -> 3:double) predicate: (UDFToDouble(id)) IN (1.0E8D, 2.0E8D) (type: boolean) Statistics: Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -96,8 +96,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_struct_in.q.out b/ql/src/test/results/clientpositive/llap/vector_struct_in.q.out index f210b72..671d020 100644 --- a/ql/src/test/results/clientpositive/llap/vector_struct_in.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_struct_in.q.out @@ -94,8 +94,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -211,8 +211,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -352,8 +352,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -469,8 +469,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -610,8 +610,8 @@ STAGE PLANS: Map 
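The vector_string_decimal hunk just above is the one place in this stretch where the plan shape changes rather than just the feature lists: CastDecimalToDouble has no DECIMAL_64 variant, so the planner inserts a ConvertDecimal64ToDecimal child that first materializes an ordinary decimal(18,0) scratch column (which is why the double output shifts from column 2 to column 3). A minimal sketch of what such a widening step does, assuming only the storage-api vector classes; the real ConvertDecimal64ToDecimal expression is generated code, this is just the idea:

    import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;

    public final class Decimal64Widening {
      // Copies a long-backed decimal column into an object-backed one so that
      // kernels without a DECIMAL_64 variant can operate on it. dst is assumed
      // preallocated with src's precision and scale.
      public static void widen(Decimal64ColumnVector src, DecimalColumnVector dst, int size) {
        dst.isRepeating = src.isRepeating;
        dst.noNulls = src.noNulls;
        int n = src.isRepeating ? 1 : size;
        for (int i = 0; i < n; i++) {
          dst.isNull[i] = src.isNull[i];
          if (!src.isNull[i]) {
            // The long is the unscaled value; reattach the column's fixed scale.
            dst.vector[i].setFromLongAndScale(src.vector[i], src.scale);
          }
        }
      }
    }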
Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -727,8 +727,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -871,8 +871,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -991,8 +991,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_udf1.q.out b/ql/src/test/results/clientpositive/llap/vector_udf1.q.out index 9e6e8e5..aef23fd 100644 --- a/ql/src/test/results/clientpositive/llap/vector_udf1.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_udf1.q.out @@ -95,8 +95,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -196,8 +196,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -297,8 +297,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -398,8 +398,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -499,8 +499,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + 
inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -600,8 +600,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -701,8 +701,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -802,8 +802,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -903,8 +903,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -1004,8 +1004,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -1105,8 +1105,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -1206,8 +1206,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1307,8 +1307,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1408,8 +1408,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: 
[DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -1509,8 +1509,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1610,8 +1610,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -1711,8 +1711,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -1812,8 +1812,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -1913,8 +1913,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -2014,8 +2014,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -2115,8 +2115,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2214,8 +2214,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -2311,8 +2311,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -2408,8 +2408,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -2507,8 +2507,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2608,8 +2608,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2811,8 +2811,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2954,8 +2954,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_udf2.q.out b/ql/src/test/results/clientpositive/llap/vector_udf2.q.out index 8e3ccc9..58c60d1 100644 --- a/ql/src/test/results/clientpositive/llap/vector_udf2.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_udf2.q.out @@ -94,8 +94,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -211,8 +211,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -298,8 +298,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git 
a/ql/src/test/results/clientpositive/llap/vector_udf_adaptor_1.q.out b/ql/src/test/results/clientpositive/llap/vector_udf_adaptor_1.q.out index 3bc9806..1c96cd6 100644 --- a/ql/src/test/results/clientpositive/llap/vector_udf_adaptor_1.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_udf_adaptor_1.q.out @@ -314,8 +314,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -490,8 +490,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -822,8 +822,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -998,8 +998,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true diff --git a/ql/src/test/results/clientpositive/llap/vector_varchar_4.q.out b/ql/src/test/results/clientpositive/llap/vector_varchar_4.q.out index 70f354c..6d9057c 100644 --- a/ql/src/test/results/clientpositive/llap/vector_varchar_4.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_varchar_4.q.out @@ -174,8 +174,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_varchar_mapjoin1.q.out b/ql/src/test/results/clientpositive/llap/vector_varchar_mapjoin1.q.out index 117246e..90a0869 100644 --- a/ql/src/test/results/clientpositive/llap/vector_varchar_mapjoin1.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_varchar_mapjoin1.q.out @@ -177,8 +177,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -206,8 +206,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] 
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -294,8 +294,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -332,8 +332,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -422,8 +422,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -460,8 +460,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_varchar_simple.q.out b/ql/src/test/results/clientpositive/llap/vector_varchar_simple.q.out index 5e798db..149d20a 100644 --- a/ql/src/test/results/clientpositive/llap/vector_varchar_simple.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_varchar_simple.q.out @@ -91,8 +91,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -211,8 +211,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -345,8 +345,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_when_case_null.q.out b/ql/src/test/results/clientpositive/llap/vector_when_case_null.q.out index 4474a0f..7a50163 100644 --- a/ql/src/test/results/clientpositive/llap/vector_when_case_null.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_when_case_null.q.out @@ -83,8 +83,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing.q.out index 428ee8d..a5d6167 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing.q.out @@ -56,8 +56,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -259,8 +258,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -467,8 +465,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -646,8 +643,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -841,8 +837,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1043,8 +1038,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1251,8 +1245,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1296,8 +1289,7 @@ STAGE PLANS: enabled: true 
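The vector_windowing hunks here and below all touch text tables read through hive.vectorized.use.vector.serde.deserialize, and each one deletes the old vectorizationSupportRemovedReasons line: LLAP no longer force-disables DECIMAL_64 on this path. What remains is the ordinary configuration gate. A sketch of that check, assuming the Hive 3 property hive.vectorized.input.format.supports.enabled with default decimal_64 (the property name is an assumption; it does not appear in this diff):

    import java.util.Arrays;
    import org.apache.hadoop.conf.Configuration;

    final class Decimal64Gate {
      static boolean decimal64Enabled(Configuration conf) {
        String list = conf.get("hive.vectorized.input.format.supports.enabled", "decimal_64");
        // The property is a comma-separated feature list; DECIMAL_64 stays in
        // use only if it survives this session-level filter.
        return Arrays.stream(list.split(","))
            .anyMatch(s -> s.trim().equalsIgnoreCase("decimal_64"));
      }
    }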
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1537,8 +1529,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1697,8 +1688,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1874,8 +1864,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2053,8 +2042,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2244,8 +2232,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2411,8 +2398,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2578,8 +2564,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2798,8 +2783,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: 
[DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3090,8 +3074,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3369,8 +3352,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3622,8 +3604,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3836,8 +3817,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4061,8 +4041,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -4258,8 +4237,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -4526,8 +4504,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4808,8 +4785,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -5323,8 +5299,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true 
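The reason all of these plans are worth regenerating: once a decimal column stays in DECIMAL_64 form, comparison kernels operate on primitive longs instead of HiveDecimal objects. A hedged sketch of such a filter (Hive's generated Decimal64 filter classes broadly follow this shape; this is not one of them):

    final class ScaledLongFilter {
      // Keeps the indices of rows whose value exceeds the threshold. Both sides
      // are unscaled longs at the same scale, so no decimal arithmetic is needed.
      static int filterGreater(long[] vector, int size, int[] selected, long scaledThreshold) {
        int newSize = 0;
        for (int i = 0; i < size; i++) {
          if (vector[i] > scaledThreshold) {
            selected[newSize++] = i;
          }
        }
        return newSize;
      }
    }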
inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -6080,8 +6055,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6263,8 +6237,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -6428,8 +6401,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -6585,8 +6557,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -6748,8 +6719,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -6921,8 +6891,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -7088,8 +7057,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -7265,8 +7233,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat 
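The same trade applies on the aggregation side: a DECIMAL_64 sum is plain long addition until it would overflow 18 digits. An illustrative kernel with a deliberately simple BigDecimal fallback (real Hive kernels widen differently):

    import java.math.BigDecimal;
    import org.apache.hadoop.hive.common.type.HiveDecimal;

    final class ScaledLongSum {
      static HiveDecimal sum(long[] vector, int size, int scale) {
        long total = 0;
        for (int i = 0; i < size; i++) {
          try {
            total = Math.addExact(total, vector[i]);  // fast path: long addition
          } catch (ArithmeticException overflow) {
            // Slow path: finish the sum in full decimal arithmetic.
            BigDecimal wide = BigDecimal.valueOf(total, scale);
            for (int j = i; j < size; j++) {
              wide = wide.add(BigDecimal.valueOf(vector[j], scale));
            }
            return HiveDecimal.create(wide);
          }
        }
        return HiveDecimal.create(BigDecimal.valueOf(total, scale));
      }
    }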
allNative: true usesVectorUDFAdaptor: false @@ -7446,8 +7413,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -7626,8 +7592,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -7824,8 +7789,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -8020,8 +7984,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -8244,8 +8207,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -8485,8 +8447,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -8674,8 +8635,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -8829,8 +8789,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -9017,8 +8976,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: 
[DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -9172,8 +9130,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -9331,8 +9288,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -9564,8 +9520,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -9705,8 +9660,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -9854,8 +9808,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_expressions.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_expressions.q.out index 148f82b..2bb7730 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_expressions.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_expressions.q.out @@ -102,8 +102,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -296,8 +295,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -438,7 +436,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE 
Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: t (type: tinyint), bo (type: boolean), s (type: string), si (type: smallint), f (type: float) sort order: ++++- @@ -457,8 +455,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -466,7 +463,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [0, 1, 4, 6, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -662,7 +659,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: si (type: smallint), i (type: int), s (type: string) sort order: +++ @@ -681,8 +678,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -690,7 +686,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 2, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -886,7 +882,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 204 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 
11:ROW__ID:struct] Reduce Output Operator key expressions: b (type: bigint), si (type: smallint), s (type: string), d (type: double) sort order: ++++ @@ -905,8 +901,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -914,7 +909,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 3, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1110,7 +1105,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: f (type: float), b (type: bigint) sort order: ++ @@ -1130,8 +1125,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1139,7 +1133,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [3, 4, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1354,8 +1348,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1533,8 +1526,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1680,7 +1672,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 228 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 
1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: ts (type: timestamp), i (type: int) sort order: ++ @@ -1700,8 +1692,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1709,7 +1700,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 7, 8] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1944,8 +1935,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_gby.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_gby.q.out index d87e96f..993ea61 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_gby.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_gby.q.out @@ -70,8 +70,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -122,8 +121,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out index 8dcb900..493d404 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out @@ -74,8 +74,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat 
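The decimal(4,2)/DECIMAL_64 annotations that now appear in vectorizationSchemaColumns and dataColumns mean the column travels as one unscaled long per row: 12.34 at scale 2 is the long 1234. A plain-JDK illustration of the encoding (not Hive code):

    import java.math.BigDecimal;

    final class ScaledLongDemo {
      public static void main(String[] args) {
        long unscaled = 1234L;                                   // stored vector value
        BigDecimal value = BigDecimal.valueOf(unscaled, 2);      // -> 12.34
        long roundTrip = value.unscaledValue().longValueExact(); // -> 1234
        System.out.println(value + " / " + roundTrip);
      }
    }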
                allNative: false
                usesVectorUDFAdaptor: false
@@ -317,8 +316,7 @@ STAGE PLANS:
                enabled: true
                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
                inputFormatFeatureSupport: [DECIMAL_64]
-               vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-               featureSupportInUse: []
+               featureSupportInUse: [DECIMAL_64]
                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
                allNative: false
                usesVectorUDFAdaptor: false
@@ -554,8 +552,7 @@ STAGE PLANS:
                enabled: true
                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
                inputFormatFeatureSupport: [DECIMAL_64]
-               vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-               featureSupportInUse: []
+               featureSupportInUse: [DECIMAL_64]
                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
                allNative: false
                usesVectorUDFAdaptor: false
@@ -913,8 +910,7 @@ STAGE PLANS:
                enabled: true
                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
                inputFormatFeatureSupport: [DECIMAL_64]
-               vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-               featureSupportInUse: []
+               featureSupportInUse: [DECIMAL_64]
                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
                allNative: true
                usesVectorUDFAdaptor: false
@@ -965,8 +961,8 @@ STAGE PLANS:
            Map Vectorization:
                enabled: true
                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-               inputFormatFeatureSupport: []
-               featureSupportInUse: []
+               inputFormatFeatureSupport: [DECIMAL_64]
+               featureSupportInUse: [DECIMAL_64]
                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                allNative: true
                usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_multipartitioning.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_multipartitioning.q.out
index 01bcb69..1a06f08 100644
--- a/ql/src/test/results/clientpositive/llap/vector_windowing_multipartitioning.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_windowing_multipartitioning.q.out
@@ -74,7 +74,7 @@ STAGE PLANS:
                  Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE
                  TableScan Vectorization:
                      native: true
-                     vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct]
+                     vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct]
                  Reduce Output Operator
                    key expressions: s (type: string), si (type: smallint)
                    sort order: ++
@@ -94,8 +94,7 @@ STAGE PLANS:
                enabled: true
                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
                inputFormatFeatureSupport: [DECIMAL_64]
-               vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
-               featureSupportInUse: []
+               featureSupportInUse: [DECIMAL_64]
                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
                allNative: true
                usesVectorUDFAdaptor: false
@@ -103,7 +102,7 @@ STAGE PLANS:
            rowBatchContext:
                dataColumnCount: 11
                includeColumns: [1, 3, 7]
-               dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary
+               dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary
                partitionColumnCount: 0
                scratchColumnTypeNames: []
        Reducer 2
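Whether a decimal column gets the /DECIMAL_64 treatment follows from its declared precision: the unscaled value must fit in a signed 64-bit long, i.e. at most 18 digits, which decimal(4,2) easily does. A plausible shape for that reader-side decision (hedged; the real choice is made inside the LLAP/ORC readers):

    import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
    import org.apache.orc.TypeDescription;

    final class DecimalVectorFactory {
      static ColumnVector forDecimal(TypeDescription type, int batchSize, boolean useDecimal64) {
        // MAX_DECIMAL64_PRECISION is 18: the widest decimal a long can hold.
        if (useDecimal64 && type.getPrecision() <= TypeDescription.MAX_DECIMAL64_PRECISION) {
          return new Decimal64ColumnVector(batchSize, type.getPrecision(), type.getScale());
        }
        return new DecimalColumnVector(batchSize, type.getPrecision(), type.getScale());
      }
    }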
@@ -10242,7 +10241,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 344 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -10269,8 +10268,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -10278,7 +10276,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [3, 7, 8, 9] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -10520,7 +10518,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -10546,8 +10544,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -10555,7 +10552,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 2, 4, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -10787,7 +10784,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 
11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -10814,8 +10811,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -10823,7 +10819,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 6, 7, 10] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -11060,7 +11056,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -11086,8 +11082,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -11095,7 +11090,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 4, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -11340,7 +11335,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 304 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -11367,8 +11362,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -11376,7 +11370,7 @@ STAGE PLANS: 
rowBatchContext: dataColumnCount: 11 includeColumns: [1, 4, 7, 9] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_navfn.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_navfn.q.out index 5466297..42e9694 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_navfn.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_navfn.q.out @@ -100,8 +100,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -226,7 +225,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 304 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: d (type: double), dec (type: decimal(4,2)) sort order: ++ @@ -246,8 +245,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -255,7 +253,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [5, 7, 9] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -485,7 +483,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 340 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: bin (type: binary), d (type: double), i (type: int) sort order: ++- @@ -505,8 +503,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: 
[DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -514,7 +511,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 5, 7, 10] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -710,7 +707,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 300 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: i (type: int), s (type: string), dec (type: decimal(4,2)) sort order: +++ @@ -729,8 +726,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -738,7 +734,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 7, 9] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -934,7 +930,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: d (type: double), f (type: float) sort order: ++ @@ -954,8 +950,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -963,7 +958,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [0, 4, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, 
f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1193,7 +1188,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: bo (type: boolean), s (type: string) sort order: ++ @@ -1212,8 +1207,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1221,7 +1215,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [6, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1418,7 +1412,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -1446,8 +1440,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1455,7 +1448,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [0, 2, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [bigint, bigint] Reducer 2 @@ -1639,8 +1632,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat 
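[Reviewer annotation] A note on the inputFormatFeatureSupport / featureSupportInUse pair repeated through these plans: the first line is what the input format advertises, the second is what survives the planner's configuration checks. Before this patch the LLAP path unconditionally removed DECIMAL_64 (hence every deleted vectorizationSupportRemovedReasons line); with that veto gone, the advertised feature flows through. A rough distillation of the filtering, using hypothetical names rather than the actual Vectorizer code:

    import java.util.EnumSet;

    enum Support { DECIMAL_64 }

    final class FeatureSupportSketch {
      // Start from what the input format advertises, then drop anything a
      // configuration check vetoes; whatever remains is "in use".
      static EnumSet<Support> featureSupportInUse(
          EnumSet<Support> inputFormatSupport, EnumSet<Support> removedByConf) {
        EnumSet<Support> inUse = EnumSet.noneOf(Support.class);
        inUse.addAll(inputFormatSupport);
        inUse.removeAll(removedByConf);
        return inUse;
      }

      public static void main(String[] args) {
        // With no LLAP veto, DECIMAL_64 passes straight through.
        System.out.println(featureSupportInUse(
            EnumSet.of(Support.DECIMAL_64), EnumSet.noneOf(Support.class)));
        // prints [DECIMAL_64]
      }
    }

This is why the remaining hunks below are pure mechanical churn: once the veto disappears, every plan that previously printed an empty featureSupportInUse list prints [DECIMAL_64] instead.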
allNative: true usesVectorUDFAdaptor: false @@ -1812,8 +1804,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1985,8 +1976,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2158,8 +2148,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_order_null.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_order_null.q.out index f950c4c..91b52e7 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_order_null.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_order_null.q.out @@ -82,7 +82,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: i (type: int), s (type: string), b (type: bigint) sort order: +++ @@ -101,8 +101,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -110,7 +109,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 3, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -215,7 +214,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: 
[0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: d (type: double), s (type: string), f (type: float) sort order: ++- @@ -234,8 +233,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -243,7 +241,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [4, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -348,7 +346,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 228 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: ts (type: timestamp), f (type: float) sort order: ++ @@ -368,8 +366,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -377,7 +374,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [4, 7, 8] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -482,7 +479,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: t (type: tinyint), s (type: string), d (type: double) sort order: ++- @@ -501,8 +498,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 
disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -510,7 +506,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [0, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -615,7 +611,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 228 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: ts (type: timestamp), s (type: string) sort order: ++ @@ -635,8 +631,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -644,7 +639,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 7, 8] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -785,7 +780,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: s (type: string), i (type: int) sort order: +- @@ -805,8 +800,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -814,7 +808,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, 
s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -950,7 +944,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: s (type: string), i (type: int) sort order: +- @@ -970,8 +964,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -979,7 +972,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1115,7 +1108,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: s (type: string), i (type: int) sort order: ++ @@ -1135,8 +1128,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1144,7 +1136,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_range_multiorder.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_range_multiorder.q.out index 3a1b9c5e..782bd9b 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_range_multiorder.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_range_multiorder.q.out @@ -74,7 +74,7 @@ STAGE 
PLANS: Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: si (type: smallint), i (type: int), b (type: bigint) sort order: +++ @@ -94,8 +94,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -103,7 +102,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [0, 1, 2, 3] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -333,7 +332,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: si (type: smallint), bo (type: boolean), i (type: int), f (type: float) sort order: +++- @@ -352,8 +351,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -361,7 +359,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 2, 4, 6] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -556,7 +554,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 
9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: si (type: smallint), bo (type: boolean), i (type: int), f (type: float) sort order: +++- @@ -575,8 +573,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -584,7 +581,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 2, 4, 6] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -779,7 +776,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: s (type: string) sort order: + @@ -798,8 +795,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -807,7 +803,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 2, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -10929,7 +10925,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: s (type: string), si (type: smallint), i (type: int) sort order: +++ @@ -10948,8 +10944,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: 
org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -10957,7 +10952,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 2, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -11187,7 +11182,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: s (type: string), si (type: smallint), i (type: int) sort order: +++ @@ -11206,8 +11201,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -11215,7 +11209,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 2, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -11445,7 +11439,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: s (type: string), si (type: smallint), i (type: int) sort order: ++- @@ -11464,8 +11458,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -11473,7 +11466,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 2, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, 
bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -11703,7 +11696,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: si (type: smallint), bo (type: boolean), i (type: int), f (type: float) sort order: +++- @@ -11722,8 +11715,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -11731,7 +11723,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 2, 4, 6] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -11961,7 +11953,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: i (type: int), bo (type: boolean), b (type: bigint) sort order: +++ @@ -11980,8 +11972,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -11989,7 +11980,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 3, 6] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -12185,7 +12176,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + 
vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: i (type: int), CAST( s AS CHAR(12)) (type: char(12)) sort order: ++ @@ -12206,8 +12197,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -12215,7 +12205,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [string] Reducer 2 @@ -12411,7 +12401,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: i (type: int), CAST( s AS varchar(12)) (type: varchar(12)) sort order: ++ @@ -12432,8 +12422,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -12441,7 +12430,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [string] Reducer 2 diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_rank.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_rank.q.out index 2bf3b07..ff7cf6c 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_rank.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_rank.q.out @@ -74,7 +74,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 
8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: f (type: float), t (type: tinyint) sort order: ++ @@ -94,8 +94,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -103,7 +102,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [0, 4, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -334,7 +333,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 228 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: ts (type: timestamp), i (type: int), s (type: string) sort order: ++- @@ -353,8 +352,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -362,7 +360,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 7, 8] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -558,7 +556,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: bo (type: boolean), b (type: bigint), s (type: string) sort order: +++ @@ -577,8 +575,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: 
org.apache.hadoop.mapred.TextInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -586,7 +583,7 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 11
includeColumns: [3, 6, 7]
- dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary
+ dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary
partitionColumnCount: 0
scratchColumnTypeNames: []
Reducer 2
@@ -782,7 +779,7 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 300 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct]
Reduce Output Operator
key expressions: dec (type: decimal(4,2)), f (type: float)
sort order: ++
@@ -802,8 +799,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -811,7 +807,7 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 11
includeColumns: [4, 7, 9]
- dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary
+ dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary
partitionColumnCount: 0
scratchColumnTypeNames: []
Reducer 2
@@ -1028,7 +1024,7 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct]
Filter Operator
Filter Vectorization:
className: VectorFilterOperator
@@ -1062,8 +1058,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -1071,7 +1066,7 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 11
includeColumns: [3, 8, 9]
- dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary
+ dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary
partitionColumnCount: 0
scratchColumnTypeNames: []
Map 4
@@ -1081,7 +1076,7 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct]
Filter Operator
Filter Vectorization:
className: VectorFilterOperator
@@ -1114,8 +1109,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -1123,7 +1117,7 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 11
includeColumns: [3]
- dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary
+ dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary
partitionColumnCount: 0
scratchColumnTypeNames: []
Reducer 2
@@ -1329,7 +1323,7 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct]
Filter Operator
Filter Vectorization:
className: VectorFilterOperator
@@ -1363,8 +1357,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -1372,7 +1365,7 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 11
includeColumns: [3, 8, 9]
- dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary
+ dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary
partitionColumnCount: 0
scratchColumnTypeNames: []
Map 4
@@ -1382,7 +1375,7 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct]
Filter Operator
Filter Vectorization:
className: VectorFilterOperator
@@ -1415,8 +1408,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -1424,7 +1416,7 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 11
includeColumns: [3]
- dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary
+ dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary
partitionColumnCount: 0
scratchColumnTypeNames: []
Reducer 2
@@ -1632,7 +1624,7 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct]
Filter Operator
Filter Vectorization:
className: VectorFilterOperator
@@ -1666,8 +1658,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -1675,7 +1666,7 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 11
includeColumns: [0, 3, 8, 9]
- dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary
+ dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary
partitionColumnCount: 0
scratchColumnTypeNames: []
Map 4
@@ -1685,7 +1676,7 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct]
Filter Operator
Filter Vectorization:
className: VectorFilterOperator
@@ -1718,8 +1709,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -1727,7 +1717,7 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 11
includeColumns: [3]
- dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary
+ dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary
partitionColumnCount: 0
scratchColumnTypeNames: []
Reducer 2
diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_streaming.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_streaming.q.out
index e120391..55899ef 100644
--- a/ql/src/test/results/clientpositive/llap/vector_windowing_streaming.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_windowing_streaming.q.out
@@ -95,8 +95,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -242,8 +241,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -425,7 +423,7 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct]
Filter Operator
Filter Vectorization:
className: VectorFilterOperator
@@ -450,8 +448,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -459,7 +456,7 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 11
includeColumns: [0, 4]
- dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary
+ dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary
partitionColumnCount: 0
scratchColumnTypeNames: []
Reducer 2
@@ -853,8 +850,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_windowspec.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_windowspec.q.out
index e3d52d2..93b8655 100644
--- a/ql/src/test/results/clientpositive/llap/vector_windowing_windowspec.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_windowing_windowspec.q.out
@@ -74,7 +74,7 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct]
Reduce Output Operator
key expressions: i (type: int), s (type: string), b (type: bigint)
sort order: +++
@@ -93,8 +93,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -102,7 +101,7 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 11
includeColumns: [2, 3, 7]
- dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary
+ dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary
partitionColumnCount: 0
scratchColumnTypeNames: []
Reducer 2
@@ -297,7 +296,7 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct]
Reduce Output Operator
key expressions: d (type: double), s (type: string), f (type: float)
sort order: +++
@@ -316,8 +315,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -325,7 +323,7 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 11
includeColumns: [4, 5, 7]
- dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary
+ dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary
partitionColumnCount: 0
scratchColumnTypeNames: []
Reducer 2
@@ -520,7 +518,7 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 228 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct]
Reduce Output Operator
key expressions: ts (type: timestamp), f (type: float)
sort order: ++
@@ -540,8 +538,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -549,7 +546,7 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 11
includeColumns: [4, 7, 8]
- dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary
+ dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary
partitionColumnCount: 0
scratchColumnTypeNames: []
Reducer 2
@@ -744,7 +741,7 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 228 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct]
Reduce Output Operator
key expressions: ts (type: timestamp), s (type: string), f (type: float)
sort order: +++
@@ -763,8 +760,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -772,7 +768,7 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 11
includeColumns: [4, 7, 8]
- dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary
+ dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary
partitionColumnCount: 0
scratchColumnTypeNames: []
Reducer 2
@@ -967,7 +963,7 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct]
Reduce Output Operator
key expressions: t (type: tinyint), s (type: string), d (type: double)
sort order: ++-
@@ -986,8 +982,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -995,7 +990,7 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 11
includeColumns: [0, 5, 7]
- dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary
+ dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary
partitionColumnCount: 0
scratchColumnTypeNames: []
Reducer 2
@@ -1190,7 +1185,7 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 228 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct]
Reduce Output Operator
key expressions: ts (type: timestamp), s (type: string)
sort order: ++
@@ -1210,8 +1205,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -1219,7 +1213,7 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 11
includeColumns: [2, 7, 8]
- dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary
+ dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary
partitionColumnCount: 0
scratchColumnTypeNames: []
Reducer 2
@@ -1449,7 +1443,7 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct]
Reduce Output Operator
key expressions: ts (type: timestamp), f (type: float)
sort order: ++
@@ -1468,8 +1462,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -1477,7 +1470,7 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 11
includeColumns: [4, 8]
- dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary
+ dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary
partitionColumnCount: 0
scratchColumnTypeNames: []
Reducer 2
@@ -1707,7 +1700,7 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct]
Reduce Output Operator
key expressions: ts (type: timestamp), f (type: float)
sort order: ++
@@ -1726,8 +1719,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -1735,7 +1727,7 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 11
includeColumns: [4, 8]
- dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary
+ dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary
partitionColumnCount: 0
scratchColumnTypeNames: []
Reducer 2
@@ -1930,7 +1922,7 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct]
Reduce Output Operator
key expressions: s (type: string), i (type: int)
sort order: ++
@@ -1950,8 +1942,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -1959,7 +1950,7 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 11
includeColumns: [2, 5, 7]
- dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary
+ dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary
partitionColumnCount: 0
scratchColumnTypeNames: []
Reducer 2
@@ -2097,7 +2088,7 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct]
Reduce Output Operator
key expressions: s (type: string), i (type: int)
sort order: ++
@@ -2117,8 +2108,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -2126,7 +2116,7 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 11
includeColumns: [2, 5, 7]
- dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary
+ dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary
partitionColumnCount: 0
scratchColumnTypeNames: []
Reducer 2
@@ -2264,7 +2254,7 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct]
Reduce Output Operator
key expressions: s (type: string), i (type: int)
sort order: ++
@@ -2284,8 +2274,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -2293,7 +2282,7 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 11
includeColumns: [2, 5, 7]
- dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary
+ dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary
partitionColumnCount: 0
scratchColumnTypeNames: []
Reducer 2
diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_windowspec4.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_windowspec4.q.out
index 3cebb04..78df440 100644
--- a/ql/src/test/results/clientpositive/llap/vector_windowing_windowspec4.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_windowing_windowspec4.q.out
@@ -92,8 +92,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: true
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_0.q.out b/ql/src/test/results/clientpositive/llap/vectorization_0.q.out
index 1382503..2a8ae49 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_0.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_0.q.out
@@ -74,8 +74,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -255,8 +255,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -425,8 +425,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -586,8 +586,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -767,8 +767,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -937,8 +937,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -1098,8 +1098,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -1279,8 +1279,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -1449,8 +1449,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -1656,8 +1656,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_1.q.out b/ql/src/test/results/clientpositive/llap/vectorization_1.q.out
index c87926c..bdc4de4 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_1.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_1.q.out
@@ -107,8 +107,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_10.q.out b/ql/src/test/results/clientpositive/llap/vectorization_10.q.out
index f4069a0..5cba462 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_10.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_10.q.out
@@ -98,8 +98,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_11.q.out b/ql/src/test/results/clientpositive/llap/vectorization_11.q.out
index 17933b7..45a3e58 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_11.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_11.q.out
@@ -80,8 +80,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_12.q.out b/ql/src/test/results/clientpositive/llap/vectorization_12.q.out
index 0ead6c4..7b508a0 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_12.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_12.q.out
@@ -134,8 +134,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_13.q.out b/ql/src/test/results/clientpositive/llap/vectorization_13.q.out
index d72c298..222d232 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_13.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_13.q.out
@@ -136,8 +136,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -488,8 +488,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_14.q.out b/ql/src/test/results/clientpositive/llap/vectorization_14.q.out
index 7ae99a3..6f99dde 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_14.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_14.q.out
@@ -136,8 +136,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_15.q.out b/ql/src/test/results/clientpositive/llap/vectorization_15.q.out
index 31363df..1f13589 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_15.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_15.q.out
@@ -132,8 +132,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_16.q.out b/ql/src/test/results/clientpositive/llap/vectorization_16.q.out
index 59f2d10..33e72fc 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_16.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_16.q.out
@@ -109,8 +109,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_17.q.out b/ql/src/test/results/clientpositive/llap/vectorization_17.q.out
index d0b2f7a..4d5f2e5 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_17.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_17.q.out
@@ -102,8 +102,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_2.q.out b/ql/src/test/results/clientpositive/llap/vectorization_2.q.out
index 83833da..29850bb 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_2.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_2.q.out
@@ -111,8 +111,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_3.q.out b/ql/src/test/results/clientpositive/llap/vectorization_3.q.out
index 3c502cd..97ad680 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_3.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_3.q.out
@@ -116,8 +116,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_4.q.out b/ql/src/test/results/clientpositive/llap/vectorization_4.q.out
index a8cfa48..c459a8c 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_4.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_4.q.out
@@ -111,8 +111,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_5.q.out b/ql/src/test/results/clientpositive/llap/vectorization_5.q.out
index 5124740..14a4691 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_5.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_5.q.out
@@ -104,8 +104,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_6.q.out b/ql/src/test/results/clientpositive/llap/vectorization_6.q.out
index 4e43bd0..d863334 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_6.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_6.q.out
@@ -92,8 +92,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_7.q.out b/ql/src/test/results/clientpositive/llap/vectorization_7.q.out
index 907411b..b0e682a 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_7.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_7.q.out
@@ -108,8 +108,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -355,8 +355,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_8.q.out b/ql/src/test/results/clientpositive/llap/vectorization_8.q.out
index 64480d7..3a09542 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_8.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_8.q.out
@@ -104,8 +104,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -338,8 +338,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_9.q.out b/ql/src/test/results/clientpositive/llap/vectorization_9.q.out
index 59f2d10..33e72fc 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_9.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_9.q.out
@@ -109,8 +109,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_decimal_date.q.out b/ql/src/test/results/clientpositive/llap/vectorization_decimal_date.q.out
index c83d5e6..f19d8a6 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_decimal_date.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_decimal_date.q.out
@@ -72,8 +72,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_div0.q.out b/ql/src/test/results/clientpositive/llap/vectorization_div0.q.out
index 61f1e26..8f4acba 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_div0.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_div0.q.out
@@ -54,8 +54,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -273,8 +273,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -492,8 +492,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -711,8 +711,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_limit.q.out b/ql/src/test/results/clientpositive/llap/vectorization_limit.q.out
index 6ea5fb2..ded40fd 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_limit.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_limit.q.out
@@ -43,8 +43,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -156,8 +156,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -314,8 +314,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -519,8 +519,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -678,8 +678,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -915,8 +915,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_nested_udf.q.out b/ql/src/test/results/clientpositive/llap/vectorization_nested_udf.q.out
index e6427fa..1a87d1d 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_nested_udf.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_nested_udf.q.out
@@ -64,8 +64,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_part_project.q.out b/ql/src/test/results/clientpositive/llap/vectorization_part_project.q.out
index 80c7c0c..89c140e 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_part_project.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_part_project.q.out
@@ -85,8 +85,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_pushdown.q.out b/ql/src/test/results/clientpositive/llap/vectorization_pushdown.q.out
index 43f999e..fb5ff60 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_pushdown.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_pushdown.q.out
@@ -46,8 +46,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out b/ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out
index 0830d1c..d12e038 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out
@@ -136,8 +136,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -398,8 +398,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -652,8 +652,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -885,8 +885,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -1115,8 +1115,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -1412,8 +1412,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -1659,8 +1659,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -1963,8 +1963,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -2223,8 +2223,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -2500,8 +2500,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -2821,8 +2821,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -3223,8 +3223,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -3458,8 +3458,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -3573,8 +3573,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -3760,8 +3760,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -3875,8 +3875,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -3990,8 +3990,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -4105,8 +4105,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -4220,8 +4220,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -4335,8 +4335,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vectorized_bucketmapjoin1.q.out b/ql/src/test/results/clientpositive/llap/vectorized_bucketmapjoin1.q.out
index 61c5051..dfe5279 100644
--- a/ql/src/test/results/clientpositive/llap/vectorized_bucketmapjoin1.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorized_bucketmapjoin1.q.out
@@ -145,8 +145,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -180,8 +180,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -280,8 +280,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -415,8 +415,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -451,8 +451,7 @@ STAGE PLANS:
enabled: true
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
inputFormatFeatureSupport: [DECIMAL_64]
- vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
- featureSupportInUse: []
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
allNative: true
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/llap/vectorized_case.q.out b/ql/src/test/results/clientpositive/llap/vectorized_case.q.out
index 18c23d6..e4ab571 100644
--- a/ql/src/test/results/clientpositive/llap/vectorized_case.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorized_case.q.out
@@ -86,8 +86,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -235,8 +235,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -326,8 +326,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -470,8 +470,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -586,7 +586,7 @@ STAGE PLANS:
Statistics: Num rows: 3 Data size: 672 Basic stats: COMPLETE Column stats: COMPLETE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:member:decimal(10,0), 1:attr:decimal(10,0), 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:member:decimal(10,0)/DECIMAL_64, 1:attr:decimal(10,0)/DECIMAL_64, 2:ROW__ID:struct]
Select Operator
expressions: CASE WHEN ((member = 1)) THEN ((attr + 1)) ELSE ((attr + 2)) END (type: decimal(11,0))
outputColumnNames: _col0
@@ -594,7 +594,7 @@ STAGE PLANS:
className: VectorSelectOperator
native: true
projectedOutputColumnNums: [6]
- selectExpressions: IfExprCondExprCondExpr(col 3:boolean, col 4:decimal(11,0)col 5:decimal(11,0))(children: VectorUDFAdaptor((member = 1)) -> 3:boolean, DecimalColAddDecimalScalar(col 1:decimal(10,0), val 1) -> 4:decimal(11,0), DecimalColAddDecimalScalar(col 1:decimal(10,0), val 2) -> 5:decimal(11,0)) -> 6:decimal(11,0)
+ selectExpressions: IfExprCondExprCondExpr(col 3:boolean, col 7:decimal(11,0)col 8:decimal(11,0))(children: VectorUDFAdaptor((member = 1)) -> 3:boolean, ConvertDecimal64ToDecimal(col 4:decimal(11,0)/DECIMAL_64)(children: Decimal64ColAddDecimal64Scalar(col 1:decimal(10,0)/DECIMAL_64, decimal64Val 1, decimalVal 1) -> 4:decimal(11,0)/DECIMAL_64) -> 7:decimal(11,0), ConvertDecimal64ToDecimal(col 5:decimal(11,0)/DECIMAL_64)(children: Decimal64ColAddDecimal64Scalar(col 1:decimal(10,0)/DECIMAL_64, decimal64Val 2, decimalVal 2) -> 5:decimal(11,0)/DECIMAL_64) -> 8:decimal(11,0)) -> 6:decimal(11,0)
Statistics: Num rows: 3 Data size: 336 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
compressed: false
@@ -611,8 +611,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: true
@@ -620,9 +620,9 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0, 1]
- dataColumns: member:decimal(10,0), attr:decimal(10,0)
+ dataColumns: member:decimal(10,0)/DECIMAL_64, attr:decimal(10,0)/DECIMAL_64
partitionColumnCount: 0
- scratchColumnTypeNames: [bigint, decimal(11,0), decimal(11,0), decimal(11,0)]
+ scratchColumnTypeNames: [bigint, decimal(11,0)/DECIMAL_64, decimal(11,0)/DECIMAL_64, decimal(11,0), decimal(11,0), decimal(11,0)]
Stage: Stage-0
Fetch Operator
@@ -667,15 +667,15 @@ STAGE PLANS:
Statistics: Num rows: 3 Data size: 672 Basic stats: COMPLETE Column stats: COMPLETE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:member:decimal(10,0), 1:attr:decimal(10,0), 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:member:decimal(10,0)/DECIMAL_64, 1:attr:decimal(10,0)/DECIMAL_64, 2:ROW__ID:struct]
Select Operator
expressions: CASE WHEN ((member = 1)) THEN (1) ELSE ((attr + 2)) END (type: decimal(11,0))
outputColumnNames: _col0
Select Vectorization:
className: VectorSelectOperator
native: true
- projectedOutputColumnNums: [6]
- selectExpressions: IfExprColumnCondExpr(col 3:boolean, col 4:decimal(1,0)col 5:decimal(11,0))(children: VectorUDFAdaptor((member = 1)) -> 3:boolean, ConstantVectorExpression(val 1) -> 4:decimal(1,0), DecimalColAddDecimalScalar(col 1:decimal(10,0), val 2) -> 5:decimal(11,0)) -> 6:decimal(11,0)
+ projectedOutputColumnNums: [8]
+ selectExpressions: VectorUDFAdaptor(CASE WHEN ((member = 1)) THEN (1) ELSE ((attr + 2)) END)(children: VectorUDFAdaptor((member = 1)) -> 6:boolean, Decimal64ColAddDecimal64Scalar(col 1:decimal(10,0)/DECIMAL_64, decimal64Val 2, decimalVal 2) -> 7:decimal(11,0)/DECIMAL_64) -> 8:decimal(11,0)
Statistics: Num rows: 3 Data size: 336 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
compressed: false
@@ -692,8 +692,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: true
@@ -701,9 +701,9 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0, 1]
- dataColumns: member:decimal(10,0), attr:decimal(10,0)
+ dataColumns: member:decimal(10,0)/DECIMAL_64, attr:decimal(10,0)/DECIMAL_64
partitionColumnCount: 0
- scratchColumnTypeNames: [bigint, decimal(1,0), decimal(11,0), decimal(11,0)]
+ scratchColumnTypeNames: [bigint, decimal(1,0), decimal(11,0)/DECIMAL_64, bigint, decimal(11,0)/DECIMAL_64, decimal(11,0)]
Stage: Stage-0
Fetch Operator
@@ -748,15 +748,15 @@ STAGE PLANS:
Statistics: Num rows: 3 Data size: 672 Basic stats: COMPLETE Column stats: COMPLETE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:member:decimal(10,0), 1:attr:decimal(10,0), 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:member:decimal(10,0)/DECIMAL_64, 1:attr:decimal(10,0)/DECIMAL_64, 2:ROW__ID:struct]
Select Operator
expressions: CASE WHEN ((member = 1)) THEN ((attr + 1)) ELSE (2) END (type: decimal(11,0))
outputColumnNames: _col0
Select Vectorization:
className: VectorSelectOperator
native: true
- projectedOutputColumnNums: [6]
- selectExpressions: IfExprCondExprColumn(col 3:boolean, col 4:decimal(11,0), col 5:decimal(1,0))(children: VectorUDFAdaptor((member = 1)) -> 3:boolean, DecimalColAddDecimalScalar(col 1:decimal(10,0), val 1) -> 4:decimal(11,0), ConstantVectorExpression(val 2) -> 5:decimal(1,0)) -> 6:decimal(11,0)
+ projectedOutputColumnNums: [8]
+ selectExpressions: VectorUDFAdaptor(CASE WHEN ((member = 1))
THEN ((attr + 1)) ELSE (2) END)(children: VectorUDFAdaptor((member = 1)) -> 6:boolean, Decimal64ColAddDecimal64Scalar(col 1:decimal(10,0)/DECIMAL_64, decimal64Val 1, decimalVal 1) -> 7:decimal(11,0)/DECIMAL_64) -> 8:decimal(11,0) Statistics: Num rows: 3 Data size: 336 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false @@ -773,8 +773,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -782,9 +782,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: member:decimal(10,0), attr:decimal(10,0) + dataColumns: member:decimal(10,0)/DECIMAL_64, attr:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [bigint, decimal(11,0), decimal(1,0), decimal(11,0)] + scratchColumnTypeNames: [bigint, decimal(11,0)/DECIMAL_64, decimal(1,0), bigint, decimal(11,0)/DECIMAL_64, decimal(11,0)] Stage: Stage-0 Fetch Operator @@ -872,8 +872,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -953,8 +953,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1034,8 +1034,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vectorized_casts.q.out b/ql/src/test/results/clientpositive/llap/vectorized_casts.q.out index b22b5ac..8f5ce87 100644 --- a/ql/src/test/results/clientpositive/llap/vectorized_casts.q.out +++ b/ql/src/test/results/clientpositive/llap/vectorized_casts.q.out @@ -200,8 +200,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true diff --git a/ql/src/test/results/clientpositive/llap/vectorized_context.q.out b/ql/src/test/results/clientpositive/llap/vectorized_context.q.out index 3edc12b..778ba26 100644 --- a/ql/src/test/results/clientpositive/llap/vectorized_context.q.out +++ b/ql/src/test/results/clientpositive/llap/vectorized_context.q.out @@ -163,8 +163,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - 
inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -191,8 +191,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -220,8 +220,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vectorized_date_funcs.q.out b/ql/src/test/results/clientpositive/llap/vectorized_date_funcs.q.out index 8ad2017..c66f3d4 100644 --- a/ql/src/test/results/clientpositive/llap/vectorized_date_funcs.q.out +++ b/ql/src/test/results/clientpositive/llap/vectorized_date_funcs.q.out @@ -288,8 +288,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -580,8 +580,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -876,8 +876,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1150,8 +1150,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1280,8 +1280,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vectorized_distinct_gby.q.out b/ql/src/test/results/clientpositive/llap/vectorized_distinct_gby.q.out index de3c6e6..7e78360 100644 --- a/ql/src/test/results/clientpositive/llap/vectorized_distinct_gby.q.out +++ 
b/ql/src/test/results/clientpositive/llap/vectorized_distinct_gby.q.out @@ -93,8 +93,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -226,8 +226,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out b/ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out index d3ab509..8ee96d3 100644 --- a/ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out +++ b/ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out @@ -82,8 +82,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -261,8 +260,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -305,8 +303,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -419,8 +417,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -448,8 +445,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -578,8 +575,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] 
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -622,8 +618,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -666,8 +662,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -804,8 +800,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -833,8 +828,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -862,8 +857,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1006,8 +1001,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1065,8 +1059,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1179,8 +1173,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1208,8 +1201,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] 
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1333,8 +1326,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1377,8 +1369,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1491,8 +1483,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1520,8 +1511,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1643,8 +1634,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1687,8 +1677,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1801,8 +1791,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1845,8 +1834,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1959,8 +1948,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP 
is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1988,8 +1976,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2102,8 +2090,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2131,8 +2118,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2258,8 +2245,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2302,8 +2288,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2437,8 +2423,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2577,8 +2562,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2605,8 +2589,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2723,8 +2707,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - 
vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2782,8 +2765,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2896,8 +2879,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2940,8 +2922,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3052,8 +3034,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3078,8 +3060,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3172,8 +3153,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3216,8 +3196,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3314,8 +3294,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3358,8 +3337,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3402,8 +3381,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3537,8 +3516,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3594,8 +3573,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3722,8 +3701,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3752,8 +3730,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3782,8 +3759,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3982,8 +3958,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -4012,8 +3987,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4042,8 +4016,7 @@ 
STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4247,8 +4220,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4277,8 +4249,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4305,8 +4276,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4335,8 +4305,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4597,8 +4566,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4641,8 +4609,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4773,8 +4741,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4817,8 +4784,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] 
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4861,8 +4828,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4983,8 +4950,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5042,8 +5008,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5161,8 +5127,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5205,8 +5170,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5313,8 +5278,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5357,8 +5321,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5465,8 +5429,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5509,8 +5472,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: 
[DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5625,8 +5588,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5779,8 +5741,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5823,8 +5784,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5914,8 +5875,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5940,8 +5901,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -6015,8 +5975,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -6057,8 +6016,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6157,8 +6116,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6201,8 +6159,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - 
inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6245,8 +6203,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6355,8 +6313,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -6425,8 +6383,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6533,8 +6491,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6563,8 +6520,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6593,8 +6549,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction.q.out b/ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction.q.out index 76d368c..2aa9af2 100644 --- a/ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction.q.out +++ b/ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction.q.out @@ -84,8 +84,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -155,8 +155,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -318,8 +318,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -389,8 +389,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -552,8 +552,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -623,8 +623,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -787,8 +787,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -858,8 +858,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -929,8 +929,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1124,8 +1124,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1223,8 +1223,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - 
inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1415,8 +1415,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1486,8 +1486,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction2.q.out b/ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction2.q.out index 37eebe4..c03b298 100644 --- a/ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction2.q.out +++ b/ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction2.q.out @@ -226,10 +226,14 @@ POSTHOOK: Input: default@dsrv2_big POSTHOOK: Input: default@dsrv2_small #### A masked pattern was here #### 20 -PREHOOK: query: EXPLAIN select count(*) from dsrv2_big a join dsrv2_small b on (a.partkey_decimal = b.partkey_decimal) +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL select count(*) from dsrv2_big a join dsrv2_small b on (a.partkey_decimal = b.partkey_decimal) PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN select count(*) from dsrv2_big a join dsrv2_small b on (a.partkey_decimal = b.partkey_decimal) +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL select count(*) from dsrv2_big a join dsrv2_small b on (a.partkey_decimal = b.partkey_decimal) POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 depends on stages: Stage-1 @@ -251,53 +255,134 @@ STAGE PLANS: alias: a filterExpr: (partkey_decimal is not null and (partkey_decimal BETWEEN DynamicValue(RS_7_b_partkey_decimal_min) AND DynamicValue(RS_7_b_partkey_decimal_max) and in_bloom_filter(partkey_decimal, DynamicValue(RS_7_b_partkey_decimal_bloom_filter)))) (type: boolean) Statistics: Num rows: 100 Data size: 11200 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:partkey_bigint:bigint, 1:partkey_decimal:decimal(10,1)/DECIMAL_64, 2:partkey_double:double, 3:shipdate_date:date, 4:shipdate_ts:timestamp, 5:shipdate_string:string, 6:shipdate_char:char(10), 7:shipdate_varchar:varchar(10), 8:ROW__ID:struct] Filter Operator + Filter Vectorization: + className: VectorFilterOperator + native: true + predicateExpression: FilterExprAndExpr(children: SelectColumnIsNotNull(col 9:decimal(10,1))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,1)/DECIMAL_64) -> 9:decimal(10,1)), FilterExprAndExpr(children: FilterDecimalColumnBetweenDynamicValue(col 9:decimal(10,1), left 0, right 0)(children: ConvertDecimal64ToDecimal(col 1:decimal(10,1)/DECIMAL_64) -> 9:decimal(10,1)), VectorInBloomFilterColDynamicValue(children: 
ConvertDecimal64ToDecimal(col 1:decimal(10,1)/DECIMAL_64) -> 9:decimal(10,1)))) predicate: ((partkey_decimal BETWEEN DynamicValue(RS_7_b_partkey_decimal_min) AND DynamicValue(RS_7_b_partkey_decimal_max) and in_bloom_filter(partkey_decimal, DynamicValue(RS_7_b_partkey_decimal_bloom_filter))) and partkey_decimal is not null) (type: boolean) Statistics: Num rows: 100 Data size: 11200 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: partkey_decimal (type: decimal(10,1)) outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [1] Statistics: Num rows: 100 Data size: 11200 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: decimal(10,1)) sort order: + Map-reduce partition columns: _col0 (type: decimal(10,1)) + Reduce Sink Vectorization: + className: VectorReduceSinkMultiKeyOperator + keyColumnNums: [1] + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumnNums: [] Statistics: Num rows: 100 Data size: 11200 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 8 + includeColumns: [1] + dataColumns: partkey_bigint:bigint, partkey_decimal:decimal(10,1)/DECIMAL_64, partkey_double:double, shipdate_date:date, shipdate_ts:timestamp, shipdate_string:string, shipdate_char:char(10), shipdate_varchar:varchar(10) + partitionColumnCount: 0 + scratchColumnTypeNames: [decimal(10,1)] Map 4 Map Operator Tree: TableScan alias: b filterExpr: partkey_decimal is not null (type: boolean) Statistics: Num rows: 20 Data size: 2240 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:partkey_bigint:bigint, 1:partkey_decimal:decimal(10,1)/DECIMAL_64, 2:partkey_double:double, 3:shipdate_date:date, 4:shipdate_ts:timestamp, 5:shipdate_string:string, 6:shipdate_char:char(10), 7:shipdate_varchar:varchar(10), 8:ROW__ID:struct] Filter Operator + Filter Vectorization: + className: VectorFilterOperator + native: true + predicateExpression: SelectColumnIsNotNull(col 9:decimal(10,1))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,1)/DECIMAL_64) -> 9:decimal(10,1)) predicate: partkey_decimal is not null (type: boolean) Statistics: Num rows: 20 Data size: 2240 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: partkey_decimal (type: decimal(10,1)) outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [1] Statistics: Num rows: 20 Data size: 2240 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: decimal(10,1)) sort order: + Map-reduce partition columns: _col0 (type: decimal(10,1)) + Reduce Sink Vectorization: + className: VectorReduceSinkMultiKeyOperator + keyColumnNums: [1] + native: true + nativeConditionsMet: 
hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumnNums: [] Statistics: Num rows: 20 Data size: 2240 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: _col0 (type: decimal(10,1)) outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [1] Statistics: Num rows: 20 Data size: 2240 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: min(_col0), max(_col0), bloom_filter(_col0, expectedEntries=20) + Group By Vectorization: + aggregators: VectorUDAFMinDecimal64(col 1:decimal(10,1)/DECIMAL_64) -> decimal(10,1)/DECIMAL_64, VectorUDAFMaxDecimal64(col 1:decimal(10,1)/DECIMAL_64) -> decimal(10,1)/DECIMAL_64, VectorUDAFBloomFilter(ConvertDecimal64ToDecimal(col 1:decimal(10,1)/DECIMAL_64) -> 9:decimal(10,1)) -> binary + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + projectedOutputColumnNums: [0, 1, 2] mode: hash outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator sort order: + Reduce Sink Vectorization: + className: VectorReduceSinkEmptyKeyOperator + keyColumnNums: [] + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumnNums: [0, 1, 2] Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: decimal(10,1)), _col1 (type: decimal(10,1)), _col2 (type: binary) Execution mode: vectorized, llap LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 8 + includeColumns: [1] + dataColumns: partkey_bigint:bigint, partkey_decimal:decimal(10,1)/DECIMAL_64, partkey_double:double, shipdate_date:date, shipdate_ts:timestamp, shipdate_string:string, shipdate_char:char(10), shipdate_varchar:varchar(10) + partitionColumnCount: 0 + scratchColumnTypeNames: [decimal(10,1)] Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -319,14 +404,37 @@ STAGE PLANS: value expressions: _col0 (type: bigint) Reducer 3 Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: + reduceColumnSortOrder: + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + dataColumns: VALUE._col0:bigint + partitionColumnCount: 0 + scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: count(VALUE._col0) + Group By Vectorization: + aggregators: VectorUDAFCountMerge(col 0:bigint) -> bigint + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: GLOBAL + projectedOutputColumnNums: [0] mode: mergepartial 
outputColumnNames: _col0 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -334,14 +442,40 @@ STAGE PLANS: serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Reducer 5 Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: + reduceColumnSortOrder: + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + dataColumns: VALUE._col0:decimal(10,1), VALUE._col1:decimal(10,1), VALUE._col2:binary + partitionColumnCount: 0 + scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: min(VALUE._col0), max(VALUE._col1), bloom_filter(VALUE._col2, expectedEntries=20) + Group By Vectorization: + aggregators: VectorUDAFMinDecimal(col 0:decimal(10,1)) -> decimal(10,1), VectorUDAFMaxDecimal(col 1:decimal(10,1)) -> decimal(10,1), VectorUDAFBloomFilterMerge(col 2:binary) -> binary + className: VectorGroupByOperator + groupByMode: FINAL + native: false + vectorProcessingMode: STREAMING + projectedOutputColumnNums: [0, 1, 2] mode: final outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator sort order: + Reduce Sink Vectorization: + className: VectorReduceSinkEmptyKeyOperator + keyColumnNums: [] + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumnNums: [0, 1, 2] Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: decimal(10,1)), _col1 (type: decimal(10,1)), _col2 (type: binary) diff --git a/ql/src/test/results/clientpositive/llap/vectorized_mapjoin.q.out b/ql/src/test/results/clientpositive/llap/vectorized_mapjoin.q.out index b30fbf3..53cb943 100644 --- a/ql/src/test/results/clientpositive/llap/vectorized_mapjoin.q.out +++ b/ql/src/test/results/clientpositive/llap/vectorized_mapjoin.q.out @@ -93,8 +93,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -135,8 +135,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vectorized_mapjoin3.q.out b/ql/src/test/results/clientpositive/llap/vectorized_mapjoin3.q.out index f05e5c0..053826e 100644 --- a/ql/src/test/results/clientpositive/llap/vectorized_mapjoin3.q.out 
+++ b/ql/src/test/results/clientpositive/llap/vectorized_mapjoin3.q.out @@ -122,12 +122,12 @@ STAGE PLANS: Statistics: Num rows: 5 Data size: 580 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:decimal0801_col:decimal(8,1), 1:int_col_1:int, 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:decimal0801_col:decimal(8,1)/DECIMAL_64, 1:int_col_1:int, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterExprAndExpr(children: SelectColumnIsNotNull(col 0:decimal(8,1)), SelectColumnIsNotNull(col 1:int)) + predicateExpression: FilterExprAndExpr(children: SelectColumnIsNotNull(col 3:decimal(8,1))(children: ConvertDecimal64ToDecimal(col 0:decimal(8,1)/DECIMAL_64) -> 3:decimal(8,1)), SelectColumnIsNotNull(col 1:int)) predicate: (decimal0801_col is not null and int_col_1 is not null) (type: boolean) Statistics: Num rows: 4 Data size: 464 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -146,12 +146,13 @@ STAGE PLANS: 1 _col0 (type: int) Map Join Vectorization: bigTableKeyColumnNums: [1] - bigTableRetainedColumnNums: [0] - bigTableValueColumnNums: [0] + bigTableRetainedColumnNums: [3] + bigTableValueColumnNums: [3] + bigTableValueExpressions: ConvertDecimal64ToDecimal(col 0:decimal(8,1)/DECIMAL_64) -> 3:decimal(8,1) className: VectorMapJoinInnerBigOnlyLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0] + projectedOutputColumnNums: [3] outputColumnNames: _col0 input vertices: 1 Reducer 3 @@ -171,8 +172,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -180,9 +181,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: decimal0801_col:decimal(8,1), int_col_1:int + dataColumns: decimal0801_col:decimal(8,1)/DECIMAL_64, int_col_1:int partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(8,1)] Map 2 Map Operator Tree: TableScan @@ -226,8 +227,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -325,12 +326,12 @@ STAGE PLANS: Statistics: Num rows: 5 Data size: 580 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:decimal0801_col:decimal(8,1), 1:int_col_1:int, 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:decimal0801_col:decimal(8,1)/DECIMAL_64, 1:int_col_1:int, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterExprAndExpr(children: SelectColumnIsNotNull(col 1:int), SelectColumnIsNotNull(col 
0:decimal(8,1))) + predicateExpression: FilterExprAndExpr(children: SelectColumnIsNotNull(col 1:int), SelectColumnIsNotNull(col 3:decimal(8,1))(children: ConvertDecimal64ToDecimal(col 0:decimal(8,1)/DECIMAL_64) -> 3:decimal(8,1))) predicate: (decimal0801_col is not null and int_col_1 is not null) (type: boolean) Statistics: Num rows: 4 Data size: 464 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -349,12 +350,13 @@ STAGE PLANS: 1 _col0 (type: int) Map Join Vectorization: bigTableKeyColumnNums: [1] - bigTableRetainedColumnNums: [0] - bigTableValueColumnNums: [0] + bigTableRetainedColumnNums: [3] + bigTableValueColumnNums: [3] + bigTableValueExpressions: ConvertDecimal64ToDecimal(col 0:decimal(8,1)/DECIMAL_64) -> 3:decimal(8,1) className: VectorMapJoinInnerBigOnlyLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0] + projectedOutputColumnNums: [3] outputColumnNames: _col0 input vertices: 1 Reducer 3 @@ -374,8 +376,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -383,9 +385,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: decimal0801_col:decimal(8,1), int_col_1:int + dataColumns: decimal0801_col:decimal(8,1)/DECIMAL_64, int_col_1:int partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(8,1)] Map 2 Map Operator Tree: TableScan @@ -429,8 +431,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -528,12 +530,12 @@ STAGE PLANS: Statistics: Num rows: 5 Data size: 580 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:decimal0801_col:decimal(8,1), 1:int_col_1:int, 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:decimal0801_col:decimal(8,1)/DECIMAL_64, 1:int_col_1:int, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterExprAndExpr(children: SelectColumnIsNotNull(col 0:decimal(8,1)), SelectColumnIsNotNull(col 1:int)) + predicateExpression: FilterExprAndExpr(children: SelectColumnIsNotNull(col 3:decimal(8,1))(children: ConvertDecimal64ToDecimal(col 0:decimal(8,1)/DECIMAL_64) -> 3:decimal(8,1)), SelectColumnIsNotNull(col 1:int)) predicate: (decimal0801_col is not null and int_col_1 is not null) (type: boolean) Statistics: Num rows: 4 Data size: 464 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -552,12 +554,13 @@ STAGE PLANS: 1 _col0 (type: int) Map Join Vectorization: bigTableKeyColumnNums: [1] - bigTableRetainedColumnNums: [0] - bigTableValueColumnNums: [0] + bigTableRetainedColumnNums: [3] + 
bigTableValueColumnNums: [3] + bigTableValueExpressions: ConvertDecimal64ToDecimal(col 0:decimal(8,1)/DECIMAL_64) -> 3:decimal(8,1) className: VectorMapJoinInnerBigOnlyLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0] + projectedOutputColumnNums: [3] outputColumnNames: _col0 input vertices: 1 Reducer 3 @@ -577,8 +580,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -586,9 +589,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: decimal0801_col:decimal(8,1), int_col_1:int + dataColumns: decimal0801_col:decimal(8,1)/DECIMAL_64, int_col_1:int partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(8,1)] Map 2 Map Operator Tree: TableScan @@ -632,8 +635,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vectorized_math_funcs.q.out b/ql/src/test/results/clientpositive/llap/vectorized_math_funcs.q.out index 35786eb..57b5845 100644 --- a/ql/src/test/results/clientpositive/llap/vectorized_math_funcs.q.out +++ b/ql/src/test/results/clientpositive/llap/vectorized_math_funcs.q.out @@ -153,8 +153,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true diff --git a/ql/src/test/results/clientpositive/llap/vectorized_nested_mapjoin.q.out b/ql/src/test/results/clientpositive/llap/vectorized_nested_mapjoin.q.out index d27a123..7546dbb 100644 --- a/ql/src/test/results/clientpositive/llap/vectorized_nested_mapjoin.q.out +++ b/ql/src/test/results/clientpositive/llap/vectorized_nested_mapjoin.q.out @@ -69,8 +69,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -98,8 +98,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true 
usesVectorUDFAdaptor: false @@ -126,8 +126,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vectorized_ptf.q.out b/ql/src/test/results/clientpositive/llap/vectorized_ptf.q.out index 045a6ad..53bd3c9 100644 --- a/ql/src/test/results/clientpositive/llap/vectorized_ptf.q.out +++ b/ql/src/test/results/clientpositive/llap/vectorized_ptf.q.out @@ -167,8 +167,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -387,8 +387,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -430,8 +430,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -640,8 +640,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -799,8 +799,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1018,8 +1018,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1240,8 +1240,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1464,8 +1464,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format 
IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1507,8 +1507,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1685,8 +1685,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1723,8 +1723,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2307,8 +2307,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2525,8 +2525,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2813,8 +2813,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3035,8 +3035,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3078,8 +3078,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3328,8 +3328,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: 
[] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3566,8 +3566,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3829,8 +3829,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -4345,8 +4345,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -4663,8 +4663,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -4977,8 +4977,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -5299,8 +5299,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -5632,8 +5632,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -5934,8 +5934,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vectorized_shufflejoin.q.out b/ql/src/test/results/clientpositive/llap/vectorized_shufflejoin.q.out index 690f0ae..68c56ed 100644 --- a/ql/src/test/results/clientpositive/llap/vectorized_shufflejoin.q.out 
+++ b/ql/src/test/results/clientpositive/llap/vectorized_shufflejoin.q.out @@ -60,8 +60,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -102,8 +102,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vectorized_string_funcs.q.out b/ql/src/test/results/clientpositive/llap/vectorized_string_funcs.q.out index 1f6e152..aecd67e 100644 --- a/ql/src/test/results/clientpositive/llap/vectorized_string_funcs.q.out +++ b/ql/src/test/results/clientpositive/llap/vectorized_string_funcs.q.out @@ -79,8 +79,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out b/ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out index 8006a71..dfab9db 100644 --- a/ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out +++ b/ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out @@ -166,8 +166,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -298,8 +298,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -391,8 +391,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -532,8 +532,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vectorized_timestamp_funcs.q.out 
b/ql/src/test/results/clientpositive/llap/vectorized_timestamp_funcs.q.out index 79ba4c6..34e2fba 100644 --- a/ql/src/test/results/clientpositive/llap/vectorized_timestamp_funcs.q.out +++ b/ql/src/test/results/clientpositive/llap/vectorized_timestamp_funcs.q.out @@ -285,8 +285,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -495,8 +495,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -689,8 +689,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -883,8 +883,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1026,8 +1026,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1153,8 +1153,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1298,8 +1298,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vectorized_timestamp_ints_casts.q.out b/ql/src/test/results/clientpositive/llap/vectorized_timestamp_ints_casts.q.out index 04cb482..82d43c1 100644 --- a/ql/src/test/results/clientpositive/llap/vectorized_timestamp_ints_casts.q.out +++ b/ql/src/test/results/clientpositive/llap/vectorized_timestamp_ints_casts.q.out @@ -85,8 +85,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + 
inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -249,8 +249,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true diff --git a/ql/src/test/results/clientpositive/mergejoin.q.out b/ql/src/test/results/clientpositive/mergejoin.q.out index 22d826b..7cbcbbe 100644 --- a/ql/src/test/results/clientpositive/mergejoin.q.out +++ b/ql/src/test/results/clientpositive/mergejoin.q.out @@ -2982,8 +2982,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3141,8 +3141,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/orc_file_dump.q.out b/ql/src/test/results/clientpositive/orc_file_dump.q.out index 8ec71a9..5101312 100644 --- a/ql/src/test/results/clientpositive/orc_file_dump.q.out +++ b/ql/src/test/results/clientpositive/orc_file_dump.q.out @@ -111,7 +111,7 @@ Stripe Statistics: Column 7: count: 1049 hasNull: false bytesOnDisk: 137 true: 526 Column 8: count: 1049 hasNull: false bytesOnDisk: 3430 min: max: zach zipper sum: 13443 Column 9: count: 1049 hasNull: false bytesOnDisk: 1802 min: 2013-03-01 09:11:58.703 max: 2013-03-01 09:11:58.703 min UTC: 2013-03-01 01:11:58.703 max UTC: 2013-03-01 01:11:58.703 - Column 10: count: 1049 hasNull: false bytesOnDisk: 2369 min: 0 max: 99.94 sum: 53646.16 + Column 10: count: 1049 hasNull: false bytesOnDisk: 2181 min: 0 max: 99.94 sum: 53646.16 Column 11: count: 1049 hasNull: false bytesOnDisk: 2468 sum: 13278 File Statistics: @@ -125,11 +125,11 @@ File Statistics: Column 7: count: 1049 hasNull: false bytesOnDisk: 137 true: 526 Column 8: count: 1049 hasNull: false bytesOnDisk: 3430 min: max: zach zipper sum: 13443 Column 9: count: 1049 hasNull: false bytesOnDisk: 1802 min: 2013-03-01 09:11:58.703 max: 2013-03-01 09:11:58.703 min UTC: 2013-03-01 01:11:58.703 max UTC: 2013-03-01 01:11:58.703 - Column 10: count: 1049 hasNull: false bytesOnDisk: 2369 min: 0 max: 99.94 sum: 53646.16 + Column 10: count: 1049 hasNull: false bytesOnDisk: 2181 min: 0 max: 99.94 sum: 53646.16 Column 11: count: 1049 hasNull: false bytesOnDisk: 2468 sum: 13278 Stripes: - Stripe: offset: 3 data: 22593 rows: 1049 tail: 252 index: 8956 + Stripe: offset: 3 data: 22405 rows: 1049 tail: 253 index: 8956 Stream: column 0 section ROW_INDEX start: 3 length 20 Stream: column 0 section BLOOM_FILTER_UTF8 start: 23 length 34 Stream: column 1 section ROW_INDEX start: 57 length 58 @@ -168,10 +168,10 @@ Stripes: Stream: column 8 section DICTIONARY_DATA start: 23365 length 1548 Stream: 
column 9 section DATA start: 24913 length 19 Stream: column 9 section SECONDARY start: 24932 length 1783 - Stream: column 10 section DATA start: 26715 length 2138 - Stream: column 10 section SECONDARY start: 28853 length 231 - Stream: column 11 section DATA start: 29084 length 1877 - Stream: column 11 section LENGTH start: 30961 length 591 + Stream: column 10 section DATA start: 26715 length 2166 + Stream: column 10 section SECONDARY start: 28881 length 15 + Stream: column 11 section DATA start: 28896 length 1877 + Stream: column 11 section LENGTH start: 30773 length 591 Encoding column 0: DIRECT Encoding column 1: DIRECT Encoding column 2: DIRECT_V2 @@ -256,7 +256,7 @@ Stripes: Stripe level merge: numHashFunctions: 4 bitCount: 6272 popCount: 4 loadFactor: 0.0006 expectedFpp: 1.6543056E-13 Row group indices for column 10: Entry 0: count: 1000 hasNull: false min: 0 max: 9994 sum: 5118211 positions: 0,0,0,0,0 - Entry 1: count: 49 hasNull: false min: 0 max: 9490 sum: 246405 positions: 0,2159,0,476,4 + Entry 1: count: 49 hasNull: false min: 0 max: 9490 sum: 246405 positions: 0,2194,0,4,488 Bloom filters for column 10: Entry 0: numHashFunctions: 4 bitCount: 6272 popCount: 2848 loadFactor: 0.4541 expectedFpp: 0.042514365 Entry 1: numHashFunctions: 4 bitCount: 6272 popCount: 194 loadFactor: 0.0309 expectedFpp: 9.153406E-7 @@ -269,7 +269,7 @@ Stripes: Entry 1: numHashFunctions: 4 bitCount: 6272 popCount: 98 loadFactor: 0.0156 expectedFpp: 5.9604645E-8 Stripe level merge: numHashFunctions: 4 bitCount: 6272 popCount: 102 loadFactor: 0.0163 expectedFpp: 6.9948186E-8 -File length: 32494 bytes +File length: 32309 bytes Padding length: 0 bytes Padding ratio: 0% ________________________________________________________________________________________________________________________ @@ -308,7 +308,7 @@ Stripe Statistics: Column 7: count: 1049 hasNull: false bytesOnDisk: 137 true: 526 Column 8: count: 1049 hasNull: false bytesOnDisk: 3430 min: max: zach zipper sum: 13443 Column 9: count: 1049 hasNull: false bytesOnDisk: 1802 min: 2013-03-01 09:11:58.703 max: 2013-03-01 09:11:58.703 min UTC: 2013-03-01 01:11:58.703 max UTC: 2013-03-01 01:11:58.703 - Column 10: count: 1049 hasNull: false bytesOnDisk: 2369 min: 0 max: 99.94 sum: 53646.16 + Column 10: count: 1049 hasNull: false bytesOnDisk: 2181 min: 0 max: 99.94 sum: 53646.16 Column 11: count: 1049 hasNull: false bytesOnDisk: 2468 sum: 13278 File Statistics: @@ -322,11 +322,11 @@ File Statistics: Column 7: count: 1049 hasNull: false bytesOnDisk: 137 true: 526 Column 8: count: 1049 hasNull: false bytesOnDisk: 3430 min: max: zach zipper sum: 13443 Column 9: count: 1049 hasNull: false bytesOnDisk: 1802 min: 2013-03-01 09:11:58.703 max: 2013-03-01 09:11:58.703 min UTC: 2013-03-01 01:11:58.703 max UTC: 2013-03-01 01:11:58.703 - Column 10: count: 1049 hasNull: false bytesOnDisk: 2369 min: 0 max: 99.94 sum: 53646.16 + Column 10: count: 1049 hasNull: false bytesOnDisk: 2181 min: 0 max: 99.94 sum: 53646.16 Column 11: count: 1049 hasNull: false bytesOnDisk: 2468 sum: 13278 Stripes: - Stripe: offset: 3 data: 22593 rows: 1049 tail: 250 index: 13603 + Stripe: offset: 3 data: 22405 rows: 1049 tail: 248 index: 13603 Stream: column 0 section ROW_INDEX start: 3 length 20 Stream: column 0 section BLOOM_FILTER_UTF8 start: 23 length 43 Stream: column 1 section ROW_INDEX start: 66 length 58 @@ -365,10 +365,10 @@ Stripes: Stream: column 8 section DICTIONARY_DATA start: 28012 length 1548 Stream: column 9 section DATA start: 29560 length 19 Stream: column 9 section SECONDARY start: 
29579 length 1783 - Stream: column 10 section DATA start: 31362 length 2138 - Stream: column 10 section SECONDARY start: 33500 length 231 - Stream: column 11 section DATA start: 33731 length 1877 - Stream: column 11 section LENGTH start: 35608 length 591 + Stream: column 10 section DATA start: 31362 length 2166 + Stream: column 10 section SECONDARY start: 33528 length 15 + Stream: column 11 section DATA start: 33543 length 1877 + Stream: column 11 section LENGTH start: 35420 length 591 Encoding column 0: DIRECT Encoding column 1: DIRECT Encoding column 2: DIRECT_V2 @@ -453,7 +453,7 @@ Stripes: Stripe level merge: numHashFunctions: 7 bitCount: 9600 popCount: 7 loadFactor: 0.0007 expectedFpp: 1.0959422E-22 Row group indices for column 10: Entry 0: count: 1000 hasNull: false min: 0 max: 9994 sum: 5118211 positions: 0,0,0,0,0 - Entry 1: count: 49 hasNull: false min: 0 max: 9490 sum: 246405 positions: 0,2159,0,476,4 + Entry 1: count: 49 hasNull: false min: 0 max: 9490 sum: 246405 positions: 0,2194,0,4,488 Bloom filters for column 10: Entry 0: numHashFunctions: 7 bitCount: 9600 popCount: 4796 loadFactor: 0.4996 expectedFpp: 0.0077670407 Entry 1: numHashFunctions: 7 bitCount: 9600 popCount: 339 loadFactor: 0.0353 expectedFpp: 6.846983E-11 @@ -466,7 +466,7 @@ Stripes: Entry 1: numHashFunctions: 7 bitCount: 9600 popCount: 174 loadFactor: 0.0181 expectedFpp: 6.426078E-13 Stripe level merge: numHashFunctions: 7 bitCount: 9600 popCount: 181 loadFactor: 0.0189 expectedFpp: 8.4693775E-13 -File length: 37141 bytes +File length: 36950 bytes Padding length: 0 bytes Padding ratio: 0% ________________________________________________________________________________________________________________________ @@ -517,7 +517,7 @@ Stripe Statistics: Column 7: count: 1049 hasNull: false bytesOnDisk: 137 true: 526 Column 8: count: 1049 hasNull: false bytesOnDisk: 3430 min: max: zach zipper sum: 13443 Column 9: count: 1049 hasNull: false bytesOnDisk: 1802 min: 2013-03-01 09:11:58.703 max: 2013-03-01 09:11:58.703 min UTC: 2013-03-01 01:11:58.703 max UTC: 2013-03-01 01:11:58.703 - Column 10: count: 1049 hasNull: false bytesOnDisk: 2369 min: 0 max: 99.94 sum: 53646.16 + Column 10: count: 1049 hasNull: false bytesOnDisk: 2181 min: 0 max: 99.94 sum: 53646.16 Column 11: count: 1049 hasNull: false bytesOnDisk: 2468 sum: 13278 File Statistics: @@ -531,11 +531,11 @@ File Statistics: Column 7: count: 1049 hasNull: false bytesOnDisk: 137 true: 526 Column 8: count: 1049 hasNull: false bytesOnDisk: 3430 min: max: zach zipper sum: 13443 Column 9: count: 1049 hasNull: false bytesOnDisk: 1802 min: 2013-03-01 09:11:58.703 max: 2013-03-01 09:11:58.703 min UTC: 2013-03-01 01:11:58.703 max UTC: 2013-03-01 01:11:58.703 - Column 10: count: 1049 hasNull: false bytesOnDisk: 2369 min: 0 max: 99.94 sum: 53646.16 + Column 10: count: 1049 hasNull: false bytesOnDisk: 2181 min: 0 max: 99.94 sum: 53646.16 Column 11: count: 1049 hasNull: false bytesOnDisk: 2468 sum: 13278 Stripes: - Stripe: offset: 3 data: 22593 rows: 1049 tail: 252 index: 8956 + Stripe: offset: 3 data: 22405 rows: 1049 tail: 253 index: 8956 Stream: column 0 section ROW_INDEX start: 3 length 20 Stream: column 0 section BLOOM_FILTER_UTF8 start: 23 length 34 Stream: column 1 section ROW_INDEX start: 57 length 58 @@ -574,10 +574,10 @@ Stripes: Stream: column 8 section DICTIONARY_DATA start: 23365 length 1548 Stream: column 9 section DATA start: 24913 length 19 Stream: column 9 section SECONDARY start: 24932 length 1783 - Stream: column 10 section DATA start: 26715 length 2138 - Stream: 
column 10 section SECONDARY start: 28853 length 231 - Stream: column 11 section DATA start: 29084 length 1877 - Stream: column 11 section LENGTH start: 30961 length 591 + Stream: column 10 section DATA start: 26715 length 2166 + Stream: column 10 section SECONDARY start: 28881 length 15 + Stream: column 11 section DATA start: 28896 length 1877 + Stream: column 11 section LENGTH start: 30773 length 591 Encoding column 0: DIRECT Encoding column 1: DIRECT Encoding column 2: DIRECT_V2 @@ -662,7 +662,7 @@ Stripes: Stripe level merge: numHashFunctions: 4 bitCount: 6272 popCount: 4 loadFactor: 0.0006 expectedFpp: 1.6543056E-13 Row group indices for column 10: Entry 0: count: 1000 hasNull: false min: 0 max: 9994 sum: 5118211 positions: 0,0,0,0,0 - Entry 1: count: 49 hasNull: false min: 0 max: 9490 sum: 246405 positions: 0,2159,0,476,4 + Entry 1: count: 49 hasNull: false min: 0 max: 9490 sum: 246405 positions: 0,2194,0,4,488 Bloom filters for column 10: Entry 0: numHashFunctions: 4 bitCount: 6272 popCount: 2848 loadFactor: 0.4541 expectedFpp: 0.042514365 Entry 1: numHashFunctions: 4 bitCount: 6272 popCount: 194 loadFactor: 0.0309 expectedFpp: 9.153406E-7 @@ -675,7 +675,7 @@ Stripes: Entry 1: numHashFunctions: 4 bitCount: 6272 popCount: 98 loadFactor: 0.0156 expectedFpp: 5.9604645E-8 Stripe level merge: numHashFunctions: 4 bitCount: 6272 popCount: 102 loadFactor: 0.0163 expectedFpp: 6.9948186E-8 -File length: 32494 bytes +File length: 32309 bytes Padding length: 0 bytes Padding ratio: 0% ________________________________________________________________________________________________________________________ diff --git a/ql/src/test/results/clientpositive/orc_merge11.q.out b/ql/src/test/results/clientpositive/orc_merge11.q.out index 1b2ddd3..8e7840c 100644 --- a/ql/src/test/results/clientpositive/orc_merge11.q.out +++ b/ql/src/test/results/clientpositive/orc_merge11.q.out @@ -6,11 +6,11 @@ PREHOOK: query: DROP TABLE orc_split_elim_n0 PREHOOK: type: DROPTABLE POSTHOOK: query: DROP TABLE orc_split_elim_n0 POSTHOOK: type: DROPTABLE -PREHOOK: query: create table orc_split_elim_n0 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +PREHOOK: query: create table orc_split_elim_n0 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@orc_split_elim_n0 -POSTHOOK: query: create table orc_split_elim_n0 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +POSTHOOK: query: create table orc_split_elim_n0 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orc_split_elim_n0 @@ -30,36 +30,36 @@ POSTHOOK: query: load data local inpath '../../data/files/orc_split_elim.orc' in POSTHOOK: type: LOAD #### A masked pattern was here #### POSTHOOK: Output: default@orc_split_elim_n0 -PREHOOK: query: create table orcfile_merge1_n2 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc tblproperties("orc.compress.size"="4096") +PREHOOK: query: create table orcfile_merge1_n2 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc tblproperties("orc.compress.size"="4096") PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@orcfile_merge1_n2 -POSTHOOK: query: create 
table orcfile_merge1_n2 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc tblproperties("orc.compress.size"="4096") +POSTHOOK: query: create table orcfile_merge1_n2 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc tblproperties("orc.compress.size"="4096") POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orcfile_merge1_n2 -PREHOOK: query: insert overwrite table orcfile_merge1_n2 select * from orc_split_elim_n0 +PREHOOK: query: insert overwrite table orcfile_merge1_n2 select * from orc_split_elim_n0 order by userid PREHOOK: type: QUERY PREHOOK: Input: default@orc_split_elim_n0 PREHOOK: Output: default@orcfile_merge1_n2 -POSTHOOK: query: insert overwrite table orcfile_merge1_n2 select * from orc_split_elim_n0 +POSTHOOK: query: insert overwrite table orcfile_merge1_n2 select * from orc_split_elim_n0 order by userid POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_split_elim_n0 POSTHOOK: Output: default@orcfile_merge1_n2 -POSTHOOK: Lineage: orcfile_merge1_n2.decimal1 SIMPLE [(orc_split_elim_n0)orc_split_elim_n0.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orcfile_merge1_n2.decimal1 SIMPLE [(orc_split_elim_n0)orc_split_elim_n0.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orcfile_merge1_n2.string1 SIMPLE [(orc_split_elim_n0)orc_split_elim_n0.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orcfile_merge1_n2.subtype SIMPLE [(orc_split_elim_n0)orc_split_elim_n0.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orcfile_merge1_n2.ts SIMPLE [(orc_split_elim_n0)orc_split_elim_n0.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orcfile_merge1_n2.userid SIMPLE [(orc_split_elim_n0)orc_split_elim_n0.FieldSchema(name:userid, type:bigint, comment:null), ] -PREHOOK: query: insert into table orcfile_merge1_n2 select * from orc_split_elim_n0 +PREHOOK: query: insert into table orcfile_merge1_n2 select * from orc_split_elim_n0 order by userid PREHOOK: type: QUERY PREHOOK: Input: default@orc_split_elim_n0 PREHOOK: Output: default@orcfile_merge1_n2 -POSTHOOK: query: insert into table orcfile_merge1_n2 select * from orc_split_elim_n0 +POSTHOOK: query: insert into table orcfile_merge1_n2 select * from orc_split_elim_n0 order by userid POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_split_elim_n0 POSTHOOK: Output: default@orcfile_merge1_n2 -POSTHOOK: Lineage: orcfile_merge1_n2.decimal1 SIMPLE [(orc_split_elim_n0)orc_split_elim_n0.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orcfile_merge1_n2.decimal1 SIMPLE [(orc_split_elim_n0)orc_split_elim_n0.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orcfile_merge1_n2.string1 SIMPLE [(orc_split_elim_n0)orc_split_elim_n0.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orcfile_merge1_n2.subtype SIMPLE [(orc_split_elim_n0)orc_split_elim_n0.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orcfile_merge1_n2.ts SIMPLE [(orc_split_elim_n0)orc_split_elim_n0.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -76,42 +76,42 @@ File Version: 0.12 with ORC_135 Rows: 50000 Compression: ZLIB Compression size: 4096 -Type: struct<userid:bigint,string1:string,subtype:double,decimal1:decimal(10,0),ts:timestamp> +Type: struct<userid:bigint,string1:string,subtype:double,decimal1:decimal(38,0),ts:timestamp> Stripe Statistics: Stripe 1: Column 0: count: 50000 hasNull: false - Column 1: count: 50000 hasNull: false bytesOnDisk: 45 min: 2 max:
100 sum: 4999238 - Column 2: count: 50000 hasNull: false bytesOnDisk: 72 min: bar max: zebra sum: 249980 - Column 3: count: 50000 hasNull: false bytesOnDisk: 5167 min: 0.8 max: 80.0 sum: 400102.80000000005 - Column 4: count: 50000 hasNull: false bytesOnDisk: 542 min: 0 max: 6 sum: 32 - Column 5: count: 50000 hasNull: false bytesOnDisk: 71 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 + Column 1: count: 50000 hasNull: false bytesOnDisk: 30 min: 2 max: 100 sum: 4999238 + Column 2: count: 50000 hasNull: false bytesOnDisk: 55 min: bar max: zebra sum: 249980 + Column 3: count: 50000 hasNull: false bytesOnDisk: 5114 min: 0.8 max: 80.0 sum: 400102.8 + Column 4: count: 50000 hasNull: false bytesOnDisk: 498 min: 0 max: 6 sum: 32 + Column 5: count: 50000 hasNull: false bytesOnDisk: 64 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 File Statistics: Column 0: count: 50000 hasNull: false - Column 1: count: 50000 hasNull: false bytesOnDisk: 45 min: 2 max: 100 sum: 4999238 - Column 2: count: 50000 hasNull: false bytesOnDisk: 72 min: bar max: zebra sum: 249980 - Column 3: count: 50000 hasNull: false bytesOnDisk: 5167 min: 0.8 max: 80.0 sum: 400102.80000000005 - Column 4: count: 50000 hasNull: false bytesOnDisk: 542 min: 0 max: 6 sum: 32 - Column 5: count: 50000 hasNull: false bytesOnDisk: 71 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 + Column 1: count: 50000 hasNull: false bytesOnDisk: 30 min: 2 max: 100 sum: 4999238 + Column 2: count: 50000 hasNull: false bytesOnDisk: 55 min: bar max: zebra sum: 249980 + Column 3: count: 50000 hasNull: false bytesOnDisk: 5114 min: 0.8 max: 80.0 sum: 400102.8 + Column 4: count: 50000 hasNull: false bytesOnDisk: 498 min: 0 max: 6 sum: 32 + Column 5: count: 50000 hasNull: false bytesOnDisk: 64 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 Stripes: - Stripe: offset: 3 data: 5897 rows: 50000 tail: 113 index: 497 + Stripe: offset: 3 data: 5761 rows: 50000 tail: 112 index: 433 Stream: column 0 section ROW_INDEX start: 3 length 17 - Stream: column 1 section ROW_INDEX start: 20 length 83 - Stream: column 2 section ROW_INDEX start: 103 length 81 - Stream: column 3 section ROW_INDEX start: 184 length 111 - Stream: column 4 section ROW_INDEX start: 295 length 110 - Stream: column 5 section ROW_INDEX start: 405 length 95 - Stream: column 1 section DATA start: 500 length 45 - Stream: column 2 section DATA start: 545 length 41 - Stream: column 2 section LENGTH start: 586 length 8 - Stream: column 2 section DICTIONARY_DATA start: 594 length 23 - Stream: column 3 section DATA start: 617 length 5167 - Stream: column 4 section DATA start: 5784 length 524 - Stream: column 4 section SECONDARY start: 6308 length 18 - Stream: column 5 section DATA start: 6326 length 53 - Stream: column 5 section SECONDARY start: 6379 length 18 + Stream: column 1 section ROW_INDEX start: 20 length 73 + Stream: column 2 section ROW_INDEX start: 93 length 79 + Stream: column 3 section ROW_INDEX start: 172 length 85 + Stream: column 4 section ROW_INDEX start: 257 length 92 + Stream: column 5 section ROW_INDEX start: 349 length 87 + Stream: column 1 section DATA start: 436 length 30 + Stream: column 2 section DATA start: 466 length 24 + Stream: column 2 section LENGTH start: 490 length 8 + Stream: column 2 section DICTIONARY_DATA start: 
498 length 23 + Stream: column 3 section DATA start: 521 length 5114 + Stream: column 4 section DATA start: 5635 length 480 + Stream: column 4 section SECONDARY start: 6115 length 18 + Stream: column 5 section DATA start: 6133 length 46 + Stream: column 5 section SECONDARY start: 6179 length 18 Encoding column 0: DIRECT Encoding column 1: DIRECT_V2 Encoding column 2: DICTIONARY_V2[6] @@ -125,37 +125,37 @@ Stripes: Entry 3: count: 10000 hasNull: false positions: Entry 4: count: 10000 hasNull: false positions: Row group indices for column 1: - Entry 0: count: 10000 hasNull: false min: 2 max: 100 sum: 999815 positions: 0,0,0 - Entry 1: count: 10000 hasNull: false min: 29 max: 100 sum: 999899 positions: 0,101,391 - Entry 2: count: 10000 hasNull: false min: 2 max: 100 sum: 999807 positions: 0,207,391 - Entry 3: count: 10000 hasNull: false min: 13 max: 100 sum: 999842 positions: 0,313,391 - Entry 4: count: 10000 hasNull: false min: 5 max: 100 sum: 999875 positions: 0,419,391 + Entry 0: count: 10000 hasNull: false min: 2 max: 100 sum: 999238 positions: 0,0,0 + Entry 1: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,107,262 + Entry 2: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,207,22 + Entry 3: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,302,294 + Entry 4: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,402,54 Row group indices for column 2: - Entry 0: count: 10000 hasNull: false min: bar max: zebra sum: 49996 positions: 0,0,0 - Entry 1: count: 10000 hasNull: false min: cat max: zebra sum: 49996 positions: 0,82,391 - Entry 2: count: 10000 hasNull: false min: eat max: zebra sum: 49996 positions: 0,168,391 - Entry 3: count: 10000 hasNull: false min: bar max: zebra sum: 49996 positions: 0,254,391 - Entry 4: count: 10000 hasNull: false min: dog max: zebra sum: 49996 positions: 0,340,391 + Entry 0: count: 10000 hasNull: false min: bar max: zebra sum: 49980 positions: 0,0,0 + Entry 1: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,83,262 + Entry 2: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,163,22 + Entry 3: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,239,294 + Entry 4: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,319,54 Row group indices for column 3: - Entry 0: count: 10000 hasNull: false min: 0.8 max: 80.0 sum: 80064.8 positions: 0,0 - Entry 1: count: 10000 hasNull: false min: 1.8 max: 8.0 sum: 79993.8 positions: 1002,2176 - Entry 2: count: 10000 hasNull: false min: 0.8 max: 8.0 sum: 79985.6 positions: 2053,256 - Entry 3: count: 10000 hasNull: false min: 8.0 max: 80.0 sum: 80072.0 positions: 3067,2432 - Entry 4: count: 10000 hasNull: false min: 0.8 max: 8.0 sum: 79986.6 positions: 4117,512 + Entry 0: count: 10000 hasNull: false min: 0.8 max: 80.0 sum: 80102.8 positions: 0,0 + Entry 1: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 1017,2176 + Entry 2: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 2057,256 + Entry 3: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 3045,2432 + Entry 4: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 4085,512 Row group indices for column 4: - Entry 0: count: 10000 hasNull: false min: 0 max: 2 sum: 3 positions: 0,0,0,0,0 - Entry 1: count: 10000 hasNull: false min: 0 max: 4 sum: 7 positions: 83,1808,0,76,272 - Entry 2: count: 10000 hasNull: false min: 0 max: 6 
sum: 7 positions: 167,3616,0,156,32 - Entry 3: count: 10000 hasNull: false min: 0 max: 3 sum: 5 positions: 290,1328,0,232,304 - Entry 4: count: 10000 hasNull: false min: 0 max: 6 sum: 10 positions: 380,3136,0,312,64 + Entry 0: count: 10000 hasNull: false min: 0 max: 6 sum: 32 positions: 0,0,0,0,0 + Entry 1: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 84,1808,0,76,272 + Entry 2: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 160,3616,0,156,32 + Entry 3: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 274,1328,0,232,304 + Entry 4: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 350,3136,0,312,64 Row group indices for column 5: Entry 0: count: 10000 hasNull: false min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,0,0,0,0,0 - Entry 1: count: 10000 hasNull: false min: 1969-12-31 16:00:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,164,391,0,76,272 - Entry 2: count: 10000 hasNull: false min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,336,391,0,156,32 - Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:00:05.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:05.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,508,391,0,232,304 - Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:00:15.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:15.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,680,391,0,312,64 + Entry 1: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,194,262,0,76,272 + Entry 2: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,354,22,0,156,32 + Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,506,294,0,232,304 + Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,666,54,0,312,64 -File length: 6890 bytes +File length: 6685 bytes Padding length: 0 bytes Padding ratio: 0% ________________________________________________________________________________________________________________________ @@ -167,42 +167,42 @@ File Version: 0.12 with ORC_135 Rows: 50000 Compression: ZLIB Compression size: 4096 -Type: struct<userid:bigint,string1:string,subtype:double,decimal1:decimal(10,0),ts:timestamp> +Type: struct<userid:bigint,string1:string,subtype:double,decimal1:decimal(38,0),ts:timestamp> Stripe Statistics: Stripe 1: Column 0: count: 50000 hasNull: false - Column 1: count: 50000 hasNull: false bytesOnDisk: 45 min: 2 max: 100 sum: 4999238 - Column 2: count: 50000 hasNull: false bytesOnDisk: 72 min: bar max: zebra sum: 249980 - Column 3: count: 50000 hasNull: false bytesOnDisk: 5167 min: 0.8 max: 80.0 sum: 400102.80000000005 - Column 4: count: 50000 hasNull: false bytesOnDisk: 542 min: 0 max: 6 sum: 32 - Column 5: count: 50000 hasNull: false bytesOnDisk: 71 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 + Column 1: count: 50000 hasNull: false bytesOnDisk: 30 min: 2 max: 100 sum: 4999238 + Column 2: count: 50000 hasNull: false bytesOnDisk: 55 min: bar max: zebra sum: 249980 + Column 3: count: 50000 hasNull: false bytesOnDisk: 5114 min: 0.8 max: 80.0 sum: 400102.8 + Column 
4: count: 50000 hasNull: false bytesOnDisk: 498 min: 0 max: 6 sum: 32 + Column 5: count: 50000 hasNull: false bytesOnDisk: 64 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 File Statistics: Column 0: count: 50000 hasNull: false - Column 1: count: 50000 hasNull: false bytesOnDisk: 45 min: 2 max: 100 sum: 4999238 - Column 2: count: 50000 hasNull: false bytesOnDisk: 72 min: bar max: zebra sum: 249980 - Column 3: count: 50000 hasNull: false bytesOnDisk: 5167 min: 0.8 max: 80.0 sum: 400102.80000000005 - Column 4: count: 50000 hasNull: false bytesOnDisk: 542 min: 0 max: 6 sum: 32 - Column 5: count: 50000 hasNull: false bytesOnDisk: 71 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 + Column 1: count: 50000 hasNull: false bytesOnDisk: 30 min: 2 max: 100 sum: 4999238 + Column 2: count: 50000 hasNull: false bytesOnDisk: 55 min: bar max: zebra sum: 249980 + Column 3: count: 50000 hasNull: false bytesOnDisk: 5114 min: 0.8 max: 80.0 sum: 400102.8 + Column 4: count: 50000 hasNull: false bytesOnDisk: 498 min: 0 max: 6 sum: 32 + Column 5: count: 50000 hasNull: false bytesOnDisk: 64 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 Stripes: - Stripe: offset: 3 data: 5897 rows: 50000 tail: 113 index: 497 + Stripe: offset: 3 data: 5761 rows: 50000 tail: 112 index: 433 Stream: column 0 section ROW_INDEX start: 3 length 17 - Stream: column 1 section ROW_INDEX start: 20 length 83 - Stream: column 2 section ROW_INDEX start: 103 length 81 - Stream: column 3 section ROW_INDEX start: 184 length 111 - Stream: column 4 section ROW_INDEX start: 295 length 110 - Stream: column 5 section ROW_INDEX start: 405 length 95 - Stream: column 1 section DATA start: 500 length 45 - Stream: column 2 section DATA start: 545 length 41 - Stream: column 2 section LENGTH start: 586 length 8 - Stream: column 2 section DICTIONARY_DATA start: 594 length 23 - Stream: column 3 section DATA start: 617 length 5167 - Stream: column 4 section DATA start: 5784 length 524 - Stream: column 4 section SECONDARY start: 6308 length 18 - Stream: column 5 section DATA start: 6326 length 53 - Stream: column 5 section SECONDARY start: 6379 length 18 + Stream: column 1 section ROW_INDEX start: 20 length 73 + Stream: column 2 section ROW_INDEX start: 93 length 79 + Stream: column 3 section ROW_INDEX start: 172 length 85 + Stream: column 4 section ROW_INDEX start: 257 length 92 + Stream: column 5 section ROW_INDEX start: 349 length 87 + Stream: column 1 section DATA start: 436 length 30 + Stream: column 2 section DATA start: 466 length 24 + Stream: column 2 section LENGTH start: 490 length 8 + Stream: column 2 section DICTIONARY_DATA start: 498 length 23 + Stream: column 3 section DATA start: 521 length 5114 + Stream: column 4 section DATA start: 5635 length 480 + Stream: column 4 section SECONDARY start: 6115 length 18 + Stream: column 5 section DATA start: 6133 length 46 + Stream: column 5 section SECONDARY start: 6179 length 18 Encoding column 0: DIRECT Encoding column 1: DIRECT_V2 Encoding column 2: DICTIONARY_V2[6] @@ -216,37 +216,37 @@ Stripes: Entry 3: count: 10000 hasNull: false positions: Entry 4: count: 10000 hasNull: false positions: Row group indices for column 1: - Entry 0: count: 10000 hasNull: false min: 2 max: 100 sum: 999815 positions: 0,0,0 - Entry 1: count: 10000 hasNull: false min: 29 max: 100 sum: 999899 positions: 0,101,391 - Entry 2: 
count: 10000 hasNull: false min: 2 max: 100 sum: 999807 positions: 0,207,391 - Entry 3: count: 10000 hasNull: false min: 13 max: 100 sum: 999842 positions: 0,313,391 - Entry 4: count: 10000 hasNull: false min: 5 max: 100 sum: 999875 positions: 0,419,391 + Entry 0: count: 10000 hasNull: false min: 2 max: 100 sum: 999238 positions: 0,0,0 + Entry 1: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,107,262 + Entry 2: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,207,22 + Entry 3: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,302,294 + Entry 4: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,402,54 Row group indices for column 2: - Entry 0: count: 10000 hasNull: false min: bar max: zebra sum: 49996 positions: 0,0,0 - Entry 1: count: 10000 hasNull: false min: cat max: zebra sum: 49996 positions: 0,82,391 - Entry 2: count: 10000 hasNull: false min: eat max: zebra sum: 49996 positions: 0,168,391 - Entry 3: count: 10000 hasNull: false min: bar max: zebra sum: 49996 positions: 0,254,391 - Entry 4: count: 10000 hasNull: false min: dog max: zebra sum: 49996 positions: 0,340,391 + Entry 0: count: 10000 hasNull: false min: bar max: zebra sum: 49980 positions: 0,0,0 + Entry 1: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,83,262 + Entry 2: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,163,22 + Entry 3: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,239,294 + Entry 4: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,319,54 Row group indices for column 3: - Entry 0: count: 10000 hasNull: false min: 0.8 max: 80.0 sum: 80064.8 positions: 0,0 - Entry 1: count: 10000 hasNull: false min: 1.8 max: 8.0 sum: 79993.8 positions: 1002,2176 - Entry 2: count: 10000 hasNull: false min: 0.8 max: 8.0 sum: 79985.6 positions: 2053,256 - Entry 3: count: 10000 hasNull: false min: 8.0 max: 80.0 sum: 80072.0 positions: 3067,2432 - Entry 4: count: 10000 hasNull: false min: 0.8 max: 8.0 sum: 79986.6 positions: 4117,512 + Entry 0: count: 10000 hasNull: false min: 0.8 max: 80.0 sum: 80102.8 positions: 0,0 + Entry 1: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 1017,2176 + Entry 2: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 2057,256 + Entry 3: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 3045,2432 + Entry 4: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 4085,512 Row group indices for column 4: - Entry 0: count: 10000 hasNull: false min: 0 max: 2 sum: 3 positions: 0,0,0,0,0 - Entry 1: count: 10000 hasNull: false min: 0 max: 4 sum: 7 positions: 83,1808,0,76,272 - Entry 2: count: 10000 hasNull: false min: 0 max: 6 sum: 7 positions: 167,3616,0,156,32 - Entry 3: count: 10000 hasNull: false min: 0 max: 3 sum: 5 positions: 290,1328,0,232,304 - Entry 4: count: 10000 hasNull: false min: 0 max: 6 sum: 10 positions: 380,3136,0,312,64 + Entry 0: count: 10000 hasNull: false min: 0 max: 6 sum: 32 positions: 0,0,0,0,0 + Entry 1: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 84,1808,0,76,272 + Entry 2: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 160,3616,0,156,32 + Entry 3: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 274,1328,0,232,304 + Entry 4: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 350,3136,0,312,64 Row group indices for column 5: Entry 0: count: 10000 hasNull: false min: 
1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,0,0,0,0,0 - Entry 1: count: 10000 hasNull: false min: 1969-12-31 16:00:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,164,391,0,76,272 - Entry 2: count: 10000 hasNull: false min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,336,391,0,156,32 - Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:00:05.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:05.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,508,391,0,232,304 - Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:00:15.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:15.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,680,391,0,312,64 + Entry 1: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,194,262,0,76,272 + Entry 2: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,354,22,0,156,32 + Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,506,294,0,232,304 + Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,666,54,0,312,64 -File length: 6890 bytes +File length: 6685 bytes Padding length: 0 bytes Padding ratio: 0% ________________________________________________________________________________________________________________________ @@ -279,49 +279,49 @@ File Version: 0.12 with ORC_135 Rows: 100000 Compression: ZLIB Compression size: 4096 -Type: struct<userid:bigint,string1:string,subtype:double,decimal1:decimal(10,0),ts:timestamp> +Type: struct<userid:bigint,string1:string,subtype:double,decimal1:decimal(38,0),ts:timestamp> Stripe Statistics: Stripe 1: Column 0: count: 50000 hasNull: false - Column 1: count: 50000 hasNull: false bytesOnDisk: 45 min: 2 max: 100 sum: 4999238 - Column 2: count: 50000 hasNull: false bytesOnDisk: 72 min: bar max: zebra sum: 249980 - Column 3: count: 50000 hasNull: false bytesOnDisk: 5167 min: 0.8 max: 80.0 sum: 400102.80000000005 - Column 4: count: 50000 hasNull: false bytesOnDisk: 542 min: 0 max: 6 sum: 32 - Column 5: count: 50000 hasNull: false bytesOnDisk: 71 min: 1969-12-31 16:00:00.0 
max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 + Column 1: count: 50000 hasNull: false bytesOnDisk: 30 min: 2 max: 100 sum: 4999238 + Column 2: count: 50000 hasNull: false bytesOnDisk: 55 min: bar max: zebra sum: 249980 + Column 3: count: 50000 hasNull: false bytesOnDisk: 5114 min: 0.8 max: 80.0 sum: 400102.8 + Column 4: count: 50000 hasNull: false bytesOnDisk: 498 min: 0 max: 6 sum: 32 + Column 5: count: 50000 hasNull: false bytesOnDisk: 64 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 File Statistics: Column 0: count: 100000 hasNull: false - Column 1: count: 100000 hasNull: false bytesOnDisk: 90 min: 2 max: 100 sum: 9998476 - Column 2: count: 100000 hasNull: false bytesOnDisk: 144 min: bar max: zebra sum: 499960 - Column 3: count: 100000 hasNull: false bytesOnDisk: 10334 min: 0.8 max: 80.0 sum: 800205.6000000001 - Column 4: count: 100000 hasNull: false bytesOnDisk: 1084 min: 0 max: 6 sum: 64 - Column 5: count: 100000 hasNull: false bytesOnDisk: 142 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 + Column 1: count: 100000 hasNull: false bytesOnDisk: 60 min: 2 max: 100 sum: 9998476 + Column 2: count: 100000 hasNull: false bytesOnDisk: 110 min: bar max: zebra sum: 499960 + Column 3: count: 100000 hasNull: false bytesOnDisk: 10228 min: 0.8 max: 80.0 sum: 800205.6 + Column 4: count: 100000 hasNull: false bytesOnDisk: 996 min: 0 max: 6 sum: 64 + Column 5: count: 100000 hasNull: false bytesOnDisk: 128 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 Stripes: - Stripe: offset: 3 data: 5897 rows: 50000 tail: 113 index: 497 + Stripe: offset: 3 data: 5761 rows: 50000 tail: 112 index: 433 Stream: column 0 section ROW_INDEX start: 3 length 17 - Stream: column 1 section ROW_INDEX start: 20 length 83 - Stream: column 2 section ROW_INDEX start: 103 length 81 - Stream: column 3 section ROW_INDEX start: 184 length 111 - Stream: column 4 section ROW_INDEX start: 295 length 110 - Stream: column 5 section ROW_INDEX start: 405 length 95 - Stream: column 1 section DATA start: 500 length 45 - Stream: column 2 section DATA start: 545 length 41 - Stream: column 2 section LENGTH start: 586 length 8 - Stream: column 2 section DICTIONARY_DATA start: 594 length 23 - Stream: column 3 section DATA start: 617 length 5167 - Stream: column 4 section DATA start: 5784 length 524 - Stream: column 4 section SECONDARY start: 6308 length 18 - Stream: column 5 section DATA start: 6326 length 53 - Stream: column 5 section SECONDARY start: 6379 length 18 + Stream: column 1 section ROW_INDEX start: 20 length 73 + Stream: column 2 section ROW_INDEX start: 93 length 79 + Stream: column 3 section ROW_INDEX start: 172 length 85 + Stream: column 4 section ROW_INDEX start: 257 length 92 + Stream: column 5 section ROW_INDEX start: 349 length 87 + Stream: column 1 section DATA start: 436 length 30 + Stream: column 2 section DATA start: 466 length 24 + Stream: column 2 section LENGTH start: 490 length 8 + Stream: column 2 section DICTIONARY_DATA start: 498 length 23 + Stream: column 3 section DATA start: 521 length 5114 + Stream: column 4 section DATA start: 5635 length 480 + Stream: column 4 section SECONDARY start: 6115 length 18 + Stream: column 5 section DATA start: 6133 length 46 + Stream: column 5 section SECONDARY start: 6179 length 18 Encoding column 0: DIRECT Encoding column 1: DIRECT_V2 
Encoding column 2: DICTIONARY_V2[6] @@ -335,51 +335,51 @@ Stripes: Entry 3: count: 10000 hasNull: false positions: Entry 4: count: 10000 hasNull: false positions: Row group indices for column 1: - Entry 0: count: 10000 hasNull: false min: 2 max: 100 sum: 999815 positions: 0,0,0 - Entry 1: count: 10000 hasNull: false min: 29 max: 100 sum: 999899 positions: 0,101,391 - Entry 2: count: 10000 hasNull: false min: 2 max: 100 sum: 999807 positions: 0,207,391 - Entry 3: count: 10000 hasNull: false min: 13 max: 100 sum: 999842 positions: 0,313,391 - Entry 4: count: 10000 hasNull: false min: 5 max: 100 sum: 999875 positions: 0,419,391 + Entry 0: count: 10000 hasNull: false min: 2 max: 100 sum: 999238 positions: 0,0,0 + Entry 1: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,107,262 + Entry 2: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,207,22 + Entry 3: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,302,294 + Entry 4: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,402,54 Row group indices for column 2: - Entry 0: count: 10000 hasNull: false min: bar max: zebra sum: 49996 positions: 0,0,0 - Entry 1: count: 10000 hasNull: false min: cat max: zebra sum: 49996 positions: 0,82,391 - Entry 2: count: 10000 hasNull: false min: eat max: zebra sum: 49996 positions: 0,168,391 - Entry 3: count: 10000 hasNull: false min: bar max: zebra sum: 49996 positions: 0,254,391 - Entry 4: count: 10000 hasNull: false min: dog max: zebra sum: 49996 positions: 0,340,391 + Entry 0: count: 10000 hasNull: false min: bar max: zebra sum: 49980 positions: 0,0,0 + Entry 1: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,83,262 + Entry 2: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,163,22 + Entry 3: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,239,294 + Entry 4: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,319,54 Row group indices for column 3: - Entry 0: count: 10000 hasNull: false min: 0.8 max: 80.0 sum: 80064.8 positions: 0,0 - Entry 1: count: 10000 hasNull: false min: 1.8 max: 8.0 sum: 79993.8 positions: 1002,2176 - Entry 2: count: 10000 hasNull: false min: 0.8 max: 8.0 sum: 79985.6 positions: 2053,256 - Entry 3: count: 10000 hasNull: false min: 8.0 max: 80.0 sum: 80072.0 positions: 3067,2432 - Entry 4: count: 10000 hasNull: false min: 0.8 max: 8.0 sum: 79986.6 positions: 4117,512 + Entry 0: count: 10000 hasNull: false min: 0.8 max: 80.0 sum: 80102.8 positions: 0,0 + Entry 1: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 1017,2176 + Entry 2: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 2057,256 + Entry 3: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 3045,2432 + Entry 4: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 4085,512 Row group indices for column 4: - Entry 0: count: 10000 hasNull: false min: 0 max: 2 sum: 3 positions: 0,0,0,0,0 - Entry 1: count: 10000 hasNull: false min: 0 max: 4 sum: 7 positions: 83,1808,0,76,272 - Entry 2: count: 10000 hasNull: false min: 0 max: 6 sum: 7 positions: 167,3616,0,156,32 - Entry 3: count: 10000 hasNull: false min: 0 max: 3 sum: 5 positions: 290,1328,0,232,304 - Entry 4: count: 10000 hasNull: false min: 0 max: 6 sum: 10 positions: 380,3136,0,312,64 + Entry 0: count: 10000 hasNull: false min: 0 max: 6 sum: 32 positions: 0,0,0,0,0 + Entry 1: count: 10000 hasNull: false min: 0 max: 0 
sum: 0 positions: 84,1808,0,76,272 + Entry 2: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 160,3616,0,156,32 + Entry 3: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 274,1328,0,232,304 + Entry 4: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 350,3136,0,312,64 Row group indices for column 5: Entry 0: count: 10000 hasNull: false min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,0,0,0,0,0 - Entry 1: count: 10000 hasNull: false min: 1969-12-31 16:00:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,164,391,0,76,272 - Entry 2: count: 10000 hasNull: false min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,336,391,0,156,32 - Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:00:05.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:05.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,508,391,0,232,304 - Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:00:15.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:15.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,680,391,0,312,64 - Stripe: offset: 6510 data: 5897 rows: 50000 tail: 113 index: 497 - Stream: column 0 section ROW_INDEX start: 6510 length 17 - Stream: column 1 section ROW_INDEX start: 6527 length 83 - Stream: column 2 section ROW_INDEX start: 6610 length 81 - Stream: column 3 section ROW_INDEX start: 6691 length 111 - Stream: column 4 section ROW_INDEX start: 6802 length 110 - Stream: column 5 section ROW_INDEX start: 6912 length 95 - Stream: column 1 section DATA start: 7007 length 45 - Stream: column 2 section DATA start: 7052 length 41 - Stream: column 2 section LENGTH start: 7093 length 8 - Stream: column 2 section DICTIONARY_DATA start: 7101 length 23 - Stream: column 3 section DATA start: 7124 length 5167 - Stream: column 4 section DATA start: 12291 length 524 - Stream: column 4 section SECONDARY start: 12815 length 18 - Stream: column 5 section DATA start: 12833 length 53 - Stream: column 5 section SECONDARY start: 12886 length 18 + Entry 1: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,194,262,0,76,272 + Entry 2: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,354,22,0,156,32 + Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,506,294,0,232,304 + Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,666,54,0,312,64 + Stripe: offset: 6309 data: 5761 rows: 50000 tail: 112 index: 433 + Stream: column 0 section ROW_INDEX start: 6309 length 17 + Stream: column 1 section ROW_INDEX start: 6326 length 73 + Stream: column 2 section ROW_INDEX start: 6399 length 79 + Stream: column 3 section ROW_INDEX start: 6478 length 85 + Stream: column 4 section ROW_INDEX start: 6563 length 92 + Stream: column 5 section ROW_INDEX start: 6655 length 87 + Stream: column 1 section DATA start: 6742 length 30 + Stream: column 2 section DATA start: 6772 length 24 + Stream: column 2 section LENGTH start: 6796 length 8 + Stream: column 2 section 
DICTIONARY_DATA start: 6804 length 23 + Stream: column 3 section DATA start: 6827 length 5114 + Stream: column 4 section DATA start: 11941 length 480 + Stream: column 4 section SECONDARY start: 12421 length 18 + Stream: column 5 section DATA start: 12439 length 46 + Stream: column 5 section SECONDARY start: 12485 length 18 Encoding column 0: DIRECT Encoding column 1: DIRECT_V2 Encoding column 2: DICTIONARY_V2[6] @@ -393,37 +393,37 @@ Stripes: Entry 3: count: 10000 hasNull: false positions: Entry 4: count: 10000 hasNull: false positions: Row group indices for column 1: - Entry 0: count: 10000 hasNull: false min: 2 max: 100 sum: 999815 positions: 0,0,0 - Entry 1: count: 10000 hasNull: false min: 29 max: 100 sum: 999899 positions: 0,101,391 - Entry 2: count: 10000 hasNull: false min: 2 max: 100 sum: 999807 positions: 0,207,391 - Entry 3: count: 10000 hasNull: false min: 13 max: 100 sum: 999842 positions: 0,313,391 - Entry 4: count: 10000 hasNull: false min: 5 max: 100 sum: 999875 positions: 0,419,391 + Entry 0: count: 10000 hasNull: false min: 2 max: 100 sum: 999238 positions: 0,0,0 + Entry 1: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,107,262 + Entry 2: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,207,22 + Entry 3: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,302,294 + Entry 4: count: 10000 hasNull: false min: 100 max: 100 sum: 1000000 positions: 0,402,54 Row group indices for column 2: - Entry 0: count: 10000 hasNull: false min: bar max: zebra sum: 49996 positions: 0,0,0 - Entry 1: count: 10000 hasNull: false min: cat max: zebra sum: 49996 positions: 0,82,391 - Entry 2: count: 10000 hasNull: false min: eat max: zebra sum: 49996 positions: 0,168,391 - Entry 3: count: 10000 hasNull: false min: bar max: zebra sum: 49996 positions: 0,254,391 - Entry 4: count: 10000 hasNull: false min: dog max: zebra sum: 49996 positions: 0,340,391 + Entry 0: count: 10000 hasNull: false min: bar max: zebra sum: 49980 positions: 0,0,0 + Entry 1: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,83,262 + Entry 2: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,163,22 + Entry 3: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,239,294 + Entry 4: count: 10000 hasNull: false min: zebra max: zebra sum: 50000 positions: 0,319,54 Row group indices for column 3: - Entry 0: count: 10000 hasNull: false min: 0.8 max: 80.0 sum: 80064.8 positions: 0,0 - Entry 1: count: 10000 hasNull: false min: 1.8 max: 8.0 sum: 79993.8 positions: 1002,2176 - Entry 2: count: 10000 hasNull: false min: 0.8 max: 8.0 sum: 79985.6 positions: 2053,256 - Entry 3: count: 10000 hasNull: false min: 8.0 max: 80.0 sum: 80072.0 positions: 3067,2432 - Entry 4: count: 10000 hasNull: false min: 0.8 max: 8.0 sum: 79986.6 positions: 4117,512 + Entry 0: count: 10000 hasNull: false min: 0.8 max: 80.0 sum: 80102.8 positions: 0,0 + Entry 1: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 1017,2176 + Entry 2: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 2057,256 + Entry 3: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 3045,2432 + Entry 4: count: 10000 hasNull: false min: 8.0 max: 8.0 sum: 80000.0 positions: 4085,512 Row group indices for column 4: - Entry 0: count: 10000 hasNull: false min: 0 max: 2 sum: 3 positions: 0,0,0,0,0 - Entry 1: count: 10000 hasNull: false min: 0 max: 4 sum: 7 positions: 83,1808,0,76,272 - Entry 2: count: 10000 
hasNull: false min: 0 max: 6 sum: 7 positions: 167,3616,0,156,32 - Entry 3: count: 10000 hasNull: false min: 0 max: 3 sum: 5 positions: 290,1328,0,232,304 - Entry 4: count: 10000 hasNull: false min: 0 max: 6 sum: 10 positions: 380,3136,0,312,64 + Entry 0: count: 10000 hasNull: false min: 0 max: 6 sum: 32 positions: 0,0,0,0,0 + Entry 1: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 84,1808,0,76,272 + Entry 2: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 160,3616,0,156,32 + Entry 3: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 274,1328,0,232,304 + Entry 4: count: 10000 hasNull: false min: 0 max: 0 sum: 0 positions: 350,3136,0,312,64 Row group indices for column 5: Entry 0: count: 10000 hasNull: false min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,0,0,0,0,0 - Entry 1: count: 10000 hasNull: false min: 1969-12-31 16:00:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,164,391,0,76,272 - Entry 2: count: 10000 hasNull: false min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,336,391,0,156,32 - Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:00:05.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:05.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,508,391,0,232,304 - Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:00:15.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:15.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,680,391,0,312,64 + Entry 1: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,194,262,0,76,272 + Entry 2: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,354,22,0,156,32 + Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,506,294,0,232,304 + Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,666,54,0,312,64 -File length: 13411 bytes +File length: 13004 bytes Padding length: 0 bytes Padding ratio: 0% ________________________________________________________________________________________________________________________ diff --git a/ql/src/test/results/clientpositive/orc_merge5.q.out b/ql/src/test/results/clientpositive/orc_merge5.q.out index 0e87ce6..768132c 100644 --- a/ql/src/test/results/clientpositive/orc_merge5.q.out +++ b/ql/src/test/results/clientpositive/orc_merge5.q.out @@ -1,16 +1,16 @@ -PREHOOK: query: create table orc_merge5_n5 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +PREHOOK: query: create table orc_merge5_n5 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@orc_merge5_n5 -POSTHOOK: query: create table orc_merge5_n5 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +POSTHOOK: query: create table orc_merge5_n5 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc POSTHOOK: type: 
CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orc_merge5_n5 -PREHOOK: query: create table orc_merge5b_n0 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +PREHOOK: query: create table orc_merge5b_n0 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@orc_merge5b_n0 -POSTHOOK: query: create table orc_merge5b_n0 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +POSTHOOK: query: create table orc_merge5b_n0 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orc_merge5b_n0 @@ -43,7 +43,7 @@ STAGE PLANS: predicate: (userid <= 13L) (type: boolean) Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(10,0)), ts (type: timestamp) + expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp) outputColumnNames: _col0, _col1, _col2, _col3, _col4 Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -55,7 +55,7 @@ STAGE PLANS: serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.orc_merge5b_n0 Select Operator - expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(10,0)), _col4 (type: timestamp) + expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(38,0)), _col4 (type: timestamp) outputColumnNames: userid, string1, subtype, decimal1, ts Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE Group By Operator @@ -66,7 +66,7 @@ STAGE PLANS: Reduce Output Operator sort order: Statistics: Num rows: 1 Data size: 2344 Basic stats: COMPLETE Column stats: NONE - value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct) + value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct) Reduce Operator Tree: Group By Operator aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1), compute_stats(VALUE._col2), compute_stats(VALUE._col3), compute_stats(VALUE._col4) @@ -96,7 +96,7 @@ STAGE PLANS: Basic Stats Work: Column Stats Desc: Columns: userid, string1, subtype, decimal1, ts - Column Types: bigint, string, double, decimal(10,0), timestamp + Column Types: bigint, string, double, decimal(38,0), timestamp Table: default.orc_merge5b_n0 PREHOOK: query: insert overwrite table orc_merge5b_n0 select userid,string1,subtype,decimal1,ts from orc_merge5_n5 where userid<=13 @@ -107,7 +107,7 @@ POSTHOOK: query: insert overwrite table orc_merge5b_n0 select userid,string1,sub POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n5 POSTHOOK: Output: default@orc_merge5b_n0 -POSTHOOK: Lineage: orc_merge5b_n0.decimal1 SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5b_n0.decimal1 SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: 
orc_merge5b_n0.string1 SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5b_n0.subtype SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5b_n0.ts SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -159,7 +159,7 @@ STAGE PLANS: predicate: (userid <= 13L) (type: boolean) Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(10,0)), ts (type: timestamp) + expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp) outputColumnNames: _col0, _col1, _col2, _col3, _col4 Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -171,7 +171,7 @@ STAGE PLANS: serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.orc_merge5b_n0 Select Operator - expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(10,0)), _col4 (type: timestamp) + expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(38,0)), _col4 (type: timestamp) outputColumnNames: userid, string1, subtype, decimal1, ts Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE Group By Operator @@ -182,7 +182,7 @@ STAGE PLANS: Reduce Output Operator sort order: Statistics: Num rows: 1 Data size: 2344 Basic stats: COMPLETE Column stats: NONE - value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct) + value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct) Reduce Operator Tree: Group By Operator aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1), compute_stats(VALUE._col2), compute_stats(VALUE._col3), compute_stats(VALUE._col4) @@ -221,7 +221,7 @@ STAGE PLANS: Basic Stats Work: Column Stats Desc: Columns: userid, string1, subtype, decimal1, ts - Column Types: bigint, string, double, decimal(10,0), timestamp + Column Types: bigint, string, double, decimal(38,0), timestamp Table: default.orc_merge5b_n0 Stage: Stage-3 @@ -252,7 +252,7 @@ POSTHOOK: query: insert overwrite table orc_merge5b_n0 select userid,string1,sub POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n5 POSTHOOK: Output: default@orc_merge5b_n0 -POSTHOOK: Lineage: orc_merge5b_n0.decimal1 SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5b_n0.decimal1 SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5b_n0.string1 SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5b_n0.subtype SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5b_n0.ts SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -286,7 +286,7 @@ POSTHOOK: query: insert overwrite table orc_merge5b_n0 select userid,string1,sub POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n5 POSTHOOK: Output: default@orc_merge5b_n0 -POSTHOOK: Lineage: 
orc_merge5b_n0.decimal1 SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5b_n0.decimal1 SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5b_n0.string1 SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5b_n0.subtype SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5b_n0.ts SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:ts, type:timestamp, comment:null), ] diff --git a/ql/src/test/results/clientpositive/orc_merge6.q.out b/ql/src/test/results/clientpositive/orc_merge6.q.out index 39813b7..7c429d6 100644 --- a/ql/src/test/results/clientpositive/orc_merge6.q.out +++ b/ql/src/test/results/clientpositive/orc_merge6.q.out @@ -1,16 +1,16 @@ -PREHOOK: query: create table orc_merge5_n4 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +PREHOOK: query: create table orc_merge5_n4 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@orc_merge5_n4 -POSTHOOK: query: create table orc_merge5_n4 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +POSTHOOK: query: create table orc_merge5_n4 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orc_merge5_n4 -PREHOOK: query: create table orc_merge5a_n1 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) partitioned by (year string, hour int) stored as orc +PREHOOK: query: create table orc_merge5a_n1 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) partitioned by (year string, hour int) stored as orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@orc_merge5a_n1 -POSTHOOK: query: create table orc_merge5a_n1 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) partitioned by (year string, hour int) stored as orc +POSTHOOK: query: create table orc_merge5a_n1 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) partitioned by (year string, hour int) stored as orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orc_merge5a_n1 @@ -43,7 +43,7 @@ STAGE PLANS: predicate: (userid <= 13L) (type: boolean) Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(10,0)), ts (type: timestamp) + expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp) outputColumnNames: _col0, _col1, _col2, _col3, _col4 Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -55,7 +55,7 @@ STAGE PLANS: serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.orc_merge5a_n1 Select Operator - expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(10,0)), _col4 (type: timestamp), '2000' (type: string), 
UDFToInteger('24') (type: int) + expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(38,0)), _col4 (type: timestamp), '2000' (type: string), UDFToInteger('24') (type: int) outputColumnNames: userid, string1, subtype, decimal1, ts, year, hour Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE Group By Operator @@ -69,7 +69,7 @@ STAGE PLANS: sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: int) Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE - value expressions: _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct) + value expressions: _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct) Reduce Operator Tree: Group By Operator aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1), compute_stats(VALUE._col2), compute_stats(VALUE._col3), compute_stats(VALUE._col4) @@ -78,7 +78,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct), _col0 (type: string), _col1 (type: int) + expressions: _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct), _col0 (type: string), _col1 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -107,7 +107,7 @@ STAGE PLANS: Basic Stats Work: Column Stats Desc: Columns: userid, string1, subtype, decimal1, ts - Column Types: bigint, string, double, decimal(10,0), timestamp + Column Types: bigint, string, double, decimal(38,0), timestamp Table: default.orc_merge5a_n1 PREHOOK: query: insert overwrite table orc_merge5a_n1 partition (year="2000",hour=24) select userid,string1,subtype,decimal1,ts from orc_merge5_n4 where userid<=13 @@ -118,7 +118,7 @@ POSTHOOK: query: insert overwrite table orc_merge5a_n1 partition (year="2000",ho POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n4 POSTHOOK: Output: default@orc_merge5a_n1@year=2000/hour=24 -POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).string1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).subtype SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).ts SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -131,7 +131,7 @@ POSTHOOK: query: insert overwrite table orc_merge5a_n1 partition (year="2001",ho POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n4 POSTHOOK: Output: default@orc_merge5a_n1@year=2001/hour=24 -POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).decimal1 SIMPLE 
[(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).string1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).subtype SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).ts SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -212,7 +212,7 @@ STAGE PLANS: predicate: (userid <= 13L) (type: boolean) Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(10,0)), ts (type: timestamp) + expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp) outputColumnNames: _col0, _col1, _col2, _col3, _col4 Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -224,7 +224,7 @@ STAGE PLANS: serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.orc_merge5a_n1 Select Operator - expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(10,0)), _col4 (type: timestamp), '2000' (type: string), UDFToInteger('24') (type: int) + expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(38,0)), _col4 (type: timestamp), '2000' (type: string), UDFToInteger('24') (type: int) outputColumnNames: userid, string1, subtype, decimal1, ts, year, hour Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE Group By Operator @@ -238,7 +238,7 @@ STAGE PLANS: sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: int) Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE - value expressions: _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct) + value expressions: _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct) Reduce Operator Tree: Group By Operator aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1), compute_stats(VALUE._col2), compute_stats(VALUE._col3), compute_stats(VALUE._col4) @@ -247,7 +247,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct), _col0 (type: string), _col1 (type: int) + expressions: _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct), _col0 (type: string), _col1 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -285,7 +285,7 @@ STAGE PLANS: Basic Stats Work: Column Stats Desc: Columns: userid, string1, subtype, decimal1, ts - Column Types: bigint, string, 
double, decimal(10,0), timestamp + Column Types: bigint, string, double, decimal(38,0), timestamp Table: default.orc_merge5a_n1 Stage: Stage-3 @@ -316,7 +316,7 @@ POSTHOOK: query: insert overwrite table orc_merge5a_n1 partition (year="2000",ho POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n4 POSTHOOK: Output: default@orc_merge5a_n1@year=2000/hour=24 -POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).string1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).subtype SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).ts SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -329,7 +329,7 @@ POSTHOOK: query: insert overwrite table orc_merge5a_n1 partition (year="2001",ho POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n4 POSTHOOK: Output: default@orc_merge5a_n1@year=2001/hour=24 -POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).string1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).subtype SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).ts SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -392,7 +392,7 @@ POSTHOOK: query: insert overwrite table orc_merge5a_n1 partition (year="2000",ho POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n4 POSTHOOK: Output: default@orc_merge5a_n1@year=2000/hour=24 -POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).string1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).subtype SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).ts SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -405,7 +405,7 @@ POSTHOOK: query: insert overwrite table orc_merge5a_n1 partition (year="2001",ho POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n4 POSTHOOK: Output: default@orc_merge5a_n1@year=2001/hour=24 -POSTHOOK: Lineage: 
orc_merge5a_n1 PARTITION(year=2001,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).string1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).subtype SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).ts SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:ts, type:timestamp, comment:null), ]
diff --git a/ql/src/test/results/clientpositive/orc_merge_incompat1.q.out b/ql/src/test/results/clientpositive/orc_merge_incompat1.q.out
index 5a1b00b..6295714 100644
--- a/ql/src/test/results/clientpositive/orc_merge_incompat1.q.out
+++ b/ql/src/test/results/clientpositive/orc_merge_incompat1.q.out
@@ -1,16 +1,16 @@
-PREHOOK: query: create table orc_merge5_n3 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc
+PREHOOK: query: create table orc_merge5_n3 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@orc_merge5_n3
-POSTHOOK: query: create table orc_merge5_n3 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc
+POSTHOOK: query: create table orc_merge5_n3 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@orc_merge5_n3
-PREHOOK: query: create table orc_merge5b (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc
+PREHOOK: query: create table orc_merge5b (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@orc_merge5b
-POSTHOOK: query: create table orc_merge5b (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc
+POSTHOOK: query: create table orc_merge5b (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@orc_merge5b
@@ -42,7 +42,7 @@ STAGE PLANS:
             predicate: (userid <= 13L) (type: boolean)
             Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
             Select Operator
-              expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(10,0)), ts (type: timestamp)
+              expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
               Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
               File Output Operator
@@ -54,7 +54,7 @@ STAGE PLANS:
                   serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
                   name: default.orc_merge5b
             Select Operator
-              expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(10,0)), _col4 (type: timestamp)
+              expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(38,0)), _col4 (type: timestamp)
               outputColumnNames: userid, string1, subtype, decimal1, ts
               Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
@@ -65,7 +65,7 @@ STAGE PLANS:
       Reduce Output Operator
         sort order: 
        Statistics: Num rows: 1 Data size: 2344 Basic stats: COMPLETE Column stats: NONE
-        value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct)
+        value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct)
      Reduce Operator Tree:
        Group By Operator
          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1), compute_stats(VALUE._col2), compute_stats(VALUE._col3), compute_stats(VALUE._col4)
@@ -95,7 +95,7 @@ STAGE PLANS:
       Basic Stats Work:
       Column Stats Desc:
           Columns: userid, string1, subtype, decimal1, ts
-          Column Types: bigint, string, double, decimal(10,0), timestamp
+          Column Types: bigint, string, double, decimal(38,0), timestamp
           Table: default.orc_merge5b
 PREHOOK: query: insert overwrite table orc_merge5b select userid,string1,subtype,decimal1,ts from orc_merge5_n3 where userid<=13
@@ -106,7 +106,7 @@ POSTHOOK: query: insert overwrite table orc_merge5b select userid,string1,subtyp
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5_n3
 POSTHOOK: Output: default@orc_merge5b
-POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5b.string1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5b.subtype SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5b.ts SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:ts, type:timestamp, comment:null), ]
@@ -119,7 +119,7 @@ POSTHOOK: query: insert into table orc_merge5b select userid,string1,subtype,dec
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5_n3
 POSTHOOK: Output: default@orc_merge5b
-POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5b.string1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5b.subtype SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5b.ts SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:ts, type:timestamp, comment:null), ]
@@ -132,7 +132,7 @@ POSTHOOK: query: insert into table orc_merge5b select userid,string1,subtype,dec
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5_n3
 POSTHOOK: Output: default@orc_merge5b
-POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5b.string1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5b.subtype SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5b.ts SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:ts, type:timestamp, comment:null), ]
@@ -145,7 +145,7 @@ POSTHOOK: query: insert into table orc_merge5b select userid,string1,subtype,dec
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5_n3
 POSTHOOK: Output: default@orc_merge5b
-POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5b.string1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5b.subtype SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5b.ts SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:ts, type:timestamp, comment:null), ]
@@ -158,7 +158,7 @@ POSTHOOK: query: insert into table orc_merge5b select userid,string1,subtype,dec
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5_n3
 POSTHOOK: Output: default@orc_merge5b
-POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5b.string1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5b.subtype SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5b.ts SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:ts, type:timestamp, comment:null), ]
@@ -171,7 +171,7 @@ POSTHOOK: query: insert into table orc_merge5b select userid,string1,subtype,dec
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5_n3
 POSTHOOK: Output: default@orc_merge5b
-POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5b.string1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5b.subtype SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5b.ts SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:ts, type:timestamp, comment:null), ]
diff --git a/ql/src/test/results/clientpositive/orc_merge_incompat2.q.out b/ql/src/test/results/clientpositive/orc_merge_incompat2.q.out
index 0b76bfb..95fa5ca 100644
--- a/ql/src/test/results/clientpositive/orc_merge_incompat2.q.out
+++ b/ql/src/test/results/clientpositive/orc_merge_incompat2.q.out
@@ -1,16 +1,16 @@
-PREHOOK: query: create table orc_merge5 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc
+PREHOOK: query: create table orc_merge5 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@orc_merge5
-POSTHOOK: query: create table orc_merge5 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc
+POSTHOOK: query: create table orc_merge5 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@orc_merge5
-PREHOOK: query: create table orc_merge5a (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) partitioned by (st double) stored as orc
+PREHOOK: query: create table orc_merge5a (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) partitioned by (st double) stored as orc
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@orc_merge5a
-POSTHOOK: query: create table orc_merge5a (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) partitioned by (st double) stored as orc
+POSTHOOK: query: create table orc_merge5a (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) partitioned by (st double) stored as orc
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@orc_merge5a
@@ -39,7 +39,7 @@ STAGE PLANS:
           alias: orc_merge5
           Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(10,0)), ts (type: timestamp), subtype (type: double)
+            expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp), subtype (type: double)
             outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
             Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -51,7 +51,7 @@ STAGE PLANS:
                 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
                 name: default.orc_merge5a
           Select Operator
-            expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(10,0)), _col4 (type: timestamp), _col5 (type: double)
+            expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(38,0)), _col4 (type: timestamp), _col5 (type: double)
             outputColumnNames: userid, string1, subtype, decimal1, ts, st
             Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
             Group By Operator
@@ -65,7 +65,7 @@ STAGE PLANS:
         sort order: +
        Map-reduce partition columns: _col0 (type: double)
        Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
-        value expressions: _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct)
+        value expressions: _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct)
      Reduce Operator Tree:
        Group By Operator
          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1), compute_stats(VALUE._col2), compute_stats(VALUE._col3), compute_stats(VALUE._col4)
@@ -74,7 +74,7 @@ STAGE PLANS:
          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
          Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
          Select Operator
-            expressions: _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col0 (type: double)
+            expressions: _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col0 (type: double)
            outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
            Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
            File Output Operator
@@ -102,7 +102,7 @@ STAGE PLANS:
       Basic Stats Work:
       Column Stats Desc:
           Columns: userid, string1, subtype, decimal1, ts
-          Column Types: bigint, string, double, decimal(10,0), timestamp
+          Column Types: bigint, string, double, decimal(38,0), timestamp
           Table: default.orc_merge5a
 PREHOOK: query: insert overwrite table orc_merge5a partition (st) select userid,string1,subtype,decimal1,ts,subtype from orc_merge5 order by userid
@@ -116,22 +116,22 @@ POSTHOOK: Output: default@orc_merge5a@st=0.8
 POSTHOOK: Output: default@orc_merge5a@st=1.8
 POSTHOOK: Output: default@orc_merge5a@st=8.0
 POSTHOOK: Output: default@orc_merge5a@st=80.0
-POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
@@ -147,22 +147,22 @@ POSTHOOK: Output: default@orc_merge5a@st=0.8
 POSTHOOK: Output: default@orc_merge5a@st=1.8
 POSTHOOK: Output: default@orc_merge5a@st=8.0
 POSTHOOK: Output: default@orc_merge5a@st=80.0
-POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
@@ -178,22 +178,22 @@ POSTHOOK: Output: default@orc_merge5a@st=0.8
 POSTHOOK: Output: default@orc_merge5a@st=1.8
 POSTHOOK: Output: default@orc_merge5a@st=8.0
 POSTHOOK: Output: default@orc_merge5a@st=80.0
-POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
@@ -209,22 +209,22 @@ POSTHOOK: Output: default@orc_merge5a@st=0.8
 POSTHOOK: Output: default@orc_merge5a@st=1.8
 POSTHOOK: Output: default@orc_merge5a@st=8.0
 POSTHOOK: Output: default@orc_merge5a@st=80.0
-POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
diff --git a/ql/src/test/results/clientpositive/orc_struct_type_vectorization.q.out b/ql/src/test/results/clientpositive/orc_struct_type_vectorization.q.out
index c67e8d1..66daa07 100644
--- a/ql/src/test/results/clientpositive/orc_struct_type_vectorization.q.out
+++ b/ql/src/test/results/clientpositive/orc_struct_type_vectorization.q.out
@@ -122,8 +122,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -257,8 +257,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/spark/orc_merge5.q.out b/ql/src/test/results/clientpositive/spark/orc_merge5.q.out
index e9e24b1..5033c13 100644
--- a/ql/src/test/results/clientpositive/spark/orc_merge5.q.out
+++ b/ql/src/test/results/clientpositive/spark/orc_merge5.q.out
@@ -1,16 +1,16 @@
-PREHOOK: query: create table orc_merge5_n5 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc
+PREHOOK: query: create table orc_merge5_n5 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@orc_merge5_n5
-POSTHOOK: query: create table orc_merge5_n5 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc
+POSTHOOK: query: create table orc_merge5_n5 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@orc_merge5_n5
-PREHOOK: query: create table orc_merge5b_n0 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc
+PREHOOK: query: create table orc_merge5b_n0 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@orc_merge5b_n0
-POSTHOOK: query: create table orc_merge5b_n0 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc
+POSTHOOK: query: create table orc_merge5b_n0 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@orc_merge5b_n0
@@ -46,7 +46,7 @@ STAGE PLANS:
             predicate: (userid <= 13L) (type: boolean)
             Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
             Select Operator
-              expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(10,0)), ts (type: timestamp)
+              expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
               Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
               File Output Operator
@@ -80,7 +80,7 @@ POSTHOOK: query: insert overwrite table orc_merge5b_n0 select userid,string1,sub
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5_n5
 POSTHOOK: Output: default@orc_merge5b_n0
-POSTHOOK: Lineage: orc_merge5b_n0.decimal1 SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5b_n0.decimal1 SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5b_n0.string1 SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5b_n0.subtype SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5b_n0.ts SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:ts, type:timestamp, comment:null), ]
@@ -137,7 +137,7 @@ STAGE PLANS:
             predicate: (userid <= 13L) (type: boolean)
             Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
             Select Operator
-              expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(10,0)), ts (type: timestamp)
+              expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
               Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
               File Output Operator
@@ -208,7 +208,7 @@ POSTHOOK: query: insert overwrite table orc_merge5b_n0 select userid,string1,sub
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5_n5
 POSTHOOK: Output: default@orc_merge5b_n0
-POSTHOOK: Lineage: orc_merge5b_n0.decimal1 SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5b_n0.decimal1 SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5b_n0.string1 SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5b_n0.subtype SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5b_n0.ts SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:ts, type:timestamp, comment:null), ]
@@ -222,7 +222,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5b_n0
 POSTHOOK: Output: default@orc_merge5b_n0
 Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 1051 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 1054 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: select * from orc_merge5b_n0
 PREHOOK: type: QUERY
 PREHOOK: Input: default@orc_merge5b_n0
@@ -242,7 +242,7 @@ POSTHOOK: query: insert overwrite table orc_merge5b_n0 select userid,string1,sub
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5_n5
 POSTHOOK: Output: default@orc_merge5b_n0
-POSTHOOK: Lineage: orc_merge5b_n0.decimal1 SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5b_n0.decimal1 SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5b_n0.string1 SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5b_n0.subtype SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5b_n0.ts SIMPLE [(orc_merge5_n5)orc_merge5_n5.FieldSchema(name:ts, type:timestamp, comment:null), ]
@@ -313,7 +313,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5b_n0
 POSTHOOK: Output: default@orc_merge5b_n0
 Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 1051 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 1054 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: select * from orc_merge5b_n0
 PREHOOK: type: QUERY
 PREHOOK: Input: default@orc_merge5b_n0
diff --git a/ql/src/test/results/clientpositive/spark/orc_merge6.q.out b/ql/src/test/results/clientpositive/spark/orc_merge6.q.out
index 99624bc..e50ab30 100644
--- a/ql/src/test/results/clientpositive/spark/orc_merge6.q.out
+++ b/ql/src/test/results/clientpositive/spark/orc_merge6.q.out
@@ -1,16 +1,16 @@
-PREHOOK: query: create table orc_merge5_n4 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc
+PREHOOK: query: create table orc_merge5_n4 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@orc_merge5_n4
-POSTHOOK: query: create table orc_merge5_n4 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc
+POSTHOOK: query: create table orc_merge5_n4 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@orc_merge5_n4
-PREHOOK: query: create table orc_merge5a_n1 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) partitioned by (year string, hour int) stored as orc
+PREHOOK: query: create table orc_merge5a_n1 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) partitioned by (year string, hour int) stored as orc
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@orc_merge5a_n1
-POSTHOOK: query: create table orc_merge5a_n1 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) partitioned by (year string, hour int) stored as orc
+POSTHOOK: query: create table orc_merge5a_n1 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) partitioned by (year string, hour int) stored as orc
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@orc_merge5a_n1
@@ -46,7 +46,7 @@ STAGE PLANS:
             predicate: (userid <= 13L) (type: boolean)
             Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
             Select Operator
-              expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(10,0)), ts (type: timestamp)
+              expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
               Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
               File Output Operator
@@ -83,7 +83,7 @@ POSTHOOK: query: insert overwrite table orc_merge5a_n1 partition (year="2000",ho
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5_n4
 POSTHOOK: Output: default@orc_merge5a_n1@year=2000/hour=24
-POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).string1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).subtype SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).ts SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:ts, type:timestamp, comment:null), ]
@@ -96,7 +96,7 @@ POSTHOOK: query: insert overwrite table orc_merge5a_n1 partition (year="2001",ho
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5_n4
 POSTHOOK: Output: default@orc_merge5a_n1@year=2001/hour=24
-POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).string1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).subtype SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).ts SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:ts, type:timestamp, comment:null), ]
@@ -184,7 +184,7 @@ STAGE PLANS:
             predicate: (userid <= 13L) (type: boolean)
             Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
             Select Operator
-              expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(10,0)), ts (type: timestamp)
+              expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
               Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
              File Output Operator
@@ -258,7 +258,7 @@ POSTHOOK: query: insert overwrite table orc_merge5a_n1 partition (year="2000",ho
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5_n4
 POSTHOOK: Output: default@orc_merge5a_n1@year=2000/hour=24
-POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).string1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).subtype SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).ts SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:ts, type:timestamp, comment:null), ]
@@ -271,7 +271,7 @@ POSTHOOK: query: insert overwrite table orc_merge5a_n1 partition (year="2001",ho
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5_n4
 POSTHOOK: Output: default@orc_merge5a_n1@year=2001/hour=24
-POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).string1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).subtype SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).ts SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:ts, type:timestamp, comment:null), ]
@@ -297,9 +297,9 @@ POSTHOOK: Input: default@orc_merge5a_n1
 POSTHOOK: Output: default@orc_merge5a_n1
 POSTHOOK: Output: default@orc_merge5a_n1@year=2001/hour=24
 Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 1051 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 1054 ### HDFS DATE ### hdfs://### HDFS PATH ###
 Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 1051 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 1054 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: show partitions orc_merge5a_n1
 PREHOOK: type: SHOWPARTITIONS
 PREHOOK: Input: default@orc_merge5a_n1
@@ -334,7 +334,7 @@ POSTHOOK: query: insert overwrite table orc_merge5a_n1 partition (year="2000",ho
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5_n4
 POSTHOOK: Output: default@orc_merge5a_n1@year=2000/hour=24
-POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).string1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).subtype SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2000,hour=24).ts SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:ts, type:timestamp, comment:null), ]
@@ -347,7 +347,7 @@ POSTHOOK: query: insert overwrite table orc_merge5a_n1 partition (year="2001",ho
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5_n4
 POSTHOOK: Output: default@orc_merge5a_n1@year=2001/hour=24
-POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).decimal1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).string1 SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).subtype SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n1 PARTITION(year=2001,hour=24).ts SIMPLE [(orc_merge5_n4)orc_merge5_n4.FieldSchema(name:ts, type:timestamp, comment:null), ]
@@ -472,9 +472,9 @@ POSTHOOK: Input: default@orc_merge5a_n1
 POSTHOOK: Output: default@orc_merge5a_n1
 POSTHOOK: Output: default@orc_merge5a_n1@year=2001/hour=24
 Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 1051 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 1054 ### HDFS DATE ### hdfs://### HDFS PATH ###
 Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 1051 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 1054 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: show partitions orc_merge5a_n1
 PREHOOK: type: SHOWPARTITIONS
 PREHOOK: Input: default@orc_merge5a_n1
diff --git a/ql/src/test/results/clientpositive/spark/orc_merge7.q.out b/ql/src/test/results/clientpositive/spark/orc_merge7.q.out
index 05177f8..aa2f8bd 100644
--- a/ql/src/test/results/clientpositive/spark/orc_merge7.q.out
+++ b/ql/src/test/results/clientpositive/spark/orc_merge7.q.out
@@ -1,16 +1,16 @@
-PREHOOK: query: create table orc_merge5_n2 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc
+PREHOOK: query: create table orc_merge5_n2 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@orc_merge5_n2
-POSTHOOK: query: create table orc_merge5_n2 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc
+POSTHOOK: query: create table orc_merge5_n2 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@orc_merge5_n2
-PREHOOK: query: create table orc_merge5a_n0 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) partitioned by (st double) stored as orc
+PREHOOK: query: create table orc_merge5a_n0 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) partitioned by (st double) stored as orc
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@orc_merge5a_n0
-POSTHOOK: query: create table orc_merge5a_n0 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) partitioned by (st double) stored as orc
+POSTHOOK: query: create table orc_merge5a_n0 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) partitioned by (st double) stored as orc
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@orc_merge5a_n0
@@ -42,7 +42,7 @@ STAGE PLANS:
           alias: orc_merge5_n2
           Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(10,0)), ts (type: timestamp), subtype (type: double)
+            expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp), subtype (type: double)
             outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
             Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -81,22 +81,22 @@ POSTHOOK: Output: default@orc_merge5a_n0@st=0.8
 POSTHOOK: Output: default@orc_merge5a_n0@st=1.8
 POSTHOOK: Output: default@orc_merge5a_n0@st=8.0
 POSTHOOK: Output: default@orc_merge5a_n0@st=80.0
-POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ]
@@ -112,22 +112,22 @@ POSTHOOK: Output: default@orc_merge5a_n0@st=0.8
 POSTHOOK: Output: default@orc_merge5a_n0@st=1.8
 POSTHOOK: Output: default@orc_merge5a_n0@st=8.0
 POSTHOOK: Output: default@orc_merge5a_n0@st=80.0
-POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ]
@@ -211,7 +211,7 @@ STAGE PLANS:
           alias: orc_merge5_n2
           Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(10,0)), ts (type: timestamp), subtype (type: double)
+            expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp), subtype (type: double)
             outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
             Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -287,22 +287,22 @@ POSTHOOK: Output: default@orc_merge5a_n0@st=0.8
 POSTHOOK: Output: default@orc_merge5a_n0@st=1.8
 POSTHOOK: Output: default@orc_merge5a_n0@st=8.0
 POSTHOOK: Output: default@orc_merge5a_n0@st=80.0
-POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ]
@@ -318,22 +318,22 @@ POSTHOOK: Output: default@orc_merge5a_n0@st=0.8
 POSTHOOK: Output: default@orc_merge5a_n0@st=1.8
 POSTHOOK: Output: default@orc_merge5a_n0@st=8.0
 POSTHOOK: Output: default@orc_merge5a_n0@st=80.0
-POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ]
@@ -361,7 +361,7 @@ POSTHOOK: Output: default@orc_merge5a_n0@st=0.8
 Found 1 items
 -rw-r--r-- 3 ### USER ### ### GROUP ### 614 ### HDFS DATE ### hdfs://### HDFS PATH ###
 Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 968 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 971 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: show partitions orc_merge5a_n0
 PREHOOK: type: SHOWPARTITIONS
 PREHOOK: Input: default@orc_merge5a_n0
@@ -402,22 +402,22 @@ POSTHOOK: Output: default@orc_merge5a_n0@st=0.8
 POSTHOOK: Output: default@orc_merge5a_n0@st=1.8
 POSTHOOK: Output: default@orc_merge5a_n0@st=8.0
 POSTHOOK: Output: default@orc_merge5a_n0@st=80.0
-POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ]
@@ -433,22 +433,22 @@ POSTHOOK: Output: default@orc_merge5a_n0@st=0.8
 POSTHOOK: Output: default@orc_merge5a_n0@st=1.8
 POSTHOOK: Output: default@orc_merge5a_n0@st=8.0
 POSTHOOK: Output: default@orc_merge5a_n0@st=80.0
-POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=0.8).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=1.8).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=8.0).userid SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:userid, type:bigint, comment:null), ]
-POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).string1 SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:string1, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).subtype SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:subtype, type:double, comment:null), ]
 POSTHOOK: Lineage: orc_merge5a_n0 PARTITION(st=80.0).ts SIMPLE [(orc_merge5_n2)orc_merge5_n2.FieldSchema(name:ts, type:timestamp, comment:null), ]
@@ -574,7 +574,7 @@ POSTHOOK: Output: default@orc_merge5a_n0@st=0.8
 Found 1 items
 -rw-r--r-- 3 ### USER ### ### GROUP ### 614 ### HDFS DATE ### hdfs://### HDFS PATH ###
 Found 1 items
--rw-r--r-- 3 ### USER ### ###
GROUP ### 968 ### HDFS DATE ### hdfs://### HDFS PATH ### +-rw-r--r-- 3 ### USER ### ### GROUP ### 971 ### HDFS DATE ### hdfs://### HDFS PATH ### PREHOOK: query: show partitions orc_merge5a_n0 PREHOOK: type: SHOWPARTITIONS PREHOOK: Input: default@orc_merge5a_n0 diff --git a/ql/src/test/results/clientpositive/spark/orc_merge_incompat1.q.out b/ql/src/test/results/clientpositive/spark/orc_merge_incompat1.q.out index 72cccd8..f01c368 100644 --- a/ql/src/test/results/clientpositive/spark/orc_merge_incompat1.q.out +++ b/ql/src/test/results/clientpositive/spark/orc_merge_incompat1.q.out @@ -1,16 +1,16 @@ -PREHOOK: query: create table orc_merge5_n3 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +PREHOOK: query: create table orc_merge5_n3 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@orc_merge5_n3 -POSTHOOK: query: create table orc_merge5_n3 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +POSTHOOK: query: create table orc_merge5_n3 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orc_merge5_n3 -PREHOOK: query: create table orc_merge5b (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +PREHOOK: query: create table orc_merge5b (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@orc_merge5b -POSTHOOK: query: create table orc_merge5b (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +POSTHOOK: query: create table orc_merge5b (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orc_merge5b @@ -45,7 +45,7 @@ STAGE PLANS: predicate: (userid <= 13L) (type: boolean) Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(10,0)), ts (type: timestamp) + expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp) outputColumnNames: _col0, _col1, _col2, _col3, _col4 Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -79,7 +79,7 @@ POSTHOOK: query: insert overwrite table orc_merge5b select userid,string1,subtyp POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n3 POSTHOOK: Output: default@orc_merge5b -POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5b.string1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5b.subtype SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5b.ts SIMPLE 
[(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -92,7 +92,7 @@ POSTHOOK: query: insert into table orc_merge5b select userid,string1,subtype,dec POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n3 POSTHOOK: Output: default@orc_merge5b -POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5b.string1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5b.subtype SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5b.ts SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -105,7 +105,7 @@ POSTHOOK: query: insert into table orc_merge5b select userid,string1,subtype,dec POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n3 POSTHOOK: Output: default@orc_merge5b -POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5b.string1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5b.subtype SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5b.ts SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -118,7 +118,7 @@ POSTHOOK: query: insert into table orc_merge5b select userid,string1,subtype,dec POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n3 POSTHOOK: Output: default@orc_merge5b -POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5b.string1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5b.subtype SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5b.ts SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -131,7 +131,7 @@ POSTHOOK: query: insert into table orc_merge5b select userid,string1,subtype,dec POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n3 POSTHOOK: Output: default@orc_merge5b -POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5b.string1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5b.subtype SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5b.ts SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:ts, 
type:timestamp, comment:null), ] @@ -144,7 +144,7 @@ POSTHOOK: query: insert into table orc_merge5b select userid,string1,subtype,dec POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_merge5_n3 POSTHOOK: Output: default@orc_merge5b -POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5b.decimal1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5b.string1 SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5b.subtype SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5b.ts SIMPLE [(orc_merge5_n3)orc_merge5_n3.FieldSchema(name:ts, type:timestamp, comment:null), ] diff --git a/ql/src/test/results/clientpositive/spark/orc_merge_incompat2.q.out b/ql/src/test/results/clientpositive/spark/orc_merge_incompat2.q.out index e81ae06..617b873 100644 --- a/ql/src/test/results/clientpositive/spark/orc_merge_incompat2.q.out +++ b/ql/src/test/results/clientpositive/spark/orc_merge_incompat2.q.out @@ -1,16 +1,16 @@ -PREHOOK: query: create table orc_merge5 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +PREHOOK: query: create table orc_merge5 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@orc_merge5 -POSTHOOK: query: create table orc_merge5 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc +POSTHOOK: query: create table orc_merge5 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orc_merge5 -PREHOOK: query: create table orc_merge5a (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) partitioned by (st double) stored as orc +PREHOOK: query: create table orc_merge5a (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) partitioned by (st double) stored as orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@orc_merge5a -POSTHOOK: query: create table orc_merge5a (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) partitioned by (st double) stored as orc +POSTHOOK: query: create table orc_merge5a (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) partitioned by (st double) stored as orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orc_merge5a @@ -42,7 +42,7 @@ STAGE PLANS: alias: orc_merge5 Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(10,0)), ts (type: timestamp), subtype (type: double) + expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp), subtype (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -81,22 +81,22 @@ POSTHOOK: Output: 
default@orc_merge5a@st=0.8 POSTHOOK: Output: default@orc_merge5a@st=1.8 POSTHOOK: Output: default@orc_merge5a@st=8.0 POSTHOOK: Output: default@orc_merge5a@st=80.0 -POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -112,22 +112,22 @@ 
POSTHOOK: Output: default@orc_merge5a@st=0.8 POSTHOOK: Output: default@orc_merge5a@st=1.8 POSTHOOK: Output: default@orc_merge5a@st=8.0 POSTHOOK: Output: default@orc_merge5a@st=80.0 -POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] @@ -143,22 
+143,22 @@ POSTHOOK: Output: default@orc_merge5a@st=0.8 POSTHOOK: Output: default@orc_merge5a@st=1.8 POSTHOOK: Output: default@orc_merge5a@st=8.0 POSTHOOK: Output: default@orc_merge5a@st=80.0 -POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] 
comment:null), ]
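Reviewer note, not part of the patch: the golden-file updates above change decimal1 from decimal(10,0) to decimal(38,0). ORC's DECIMAL_64 fast path only covers decimals of precision 18 or less, so widening the column to 38 digits presumably keeps these merge tests on the generic HiveDecimal reader even once the input format starts advertising DECIMAL_64. Below is a minimal sketch of the two column-vector layouts involved, using the standard storage-api classes; the sizes and values are illustrative only.

```java
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

public class Decimal64LayoutSketch {
  public static void main(String[] args) {
    // precision 10, scale 0: fits in 18 digits, so DECIMAL_64 applies and
    // each value is carried as a scaled long (value * 10^scale) in a long[].
    Decimal64ColumnVector fast =
        new Decimal64ColumnVector(VectorizedRowBatch.DEFAULT_SIZE, 10, 0);
    fast.vector[0] = 12345L; // decimal(10,0) value 12345

    // precision 38, scale 0: too wide for 64 bits, so the reader must keep
    // using HiveDecimalWritable-backed vectors; DECIMAL_64 never applies.
    DecimalColumnVector generic =
        new DecimalColumnVector(VectorizedRowBatch.DEFAULT_SIZE, 38, 0);
    generic.vector[0].set(HiveDecimal.create("12345"));

    System.out.println(fast.vector[0]);                     // 12345
    System.out.println(generic.vector[0].getHiveDecimal()); // 12345
  }
}
```

The payoff of the long-backed layout is that arithmetic on small decimals stays in plain 64-bit registers, which is what the plan diffs further down toggle on.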
@@ -174,22 +174,22 @@ POSTHOOK: Output: default@orc_merge5a@st=0.8 POSTHOOK: Output: default@orc_merge5a@st=1.8 POSTHOOK: Output: default@orc_merge5a@st=8.0 POSTHOOK: Output: default@orc_merge5a@st=80.0 -POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ] -POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ] +POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(38,0), comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ] POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, 
comment:null), ] @@ -215,8 +215,8 @@ POSTHOOK: Input: default@orc_merge5a POSTHOOK: Output: default@orc_merge5a POSTHOOK: Output: default@orc_merge5a@st=0.8 Found 4 items --rw-r--r-- 3 ### USER ### ### GROUP ### 613 ### HDFS DATE ### hdfs://### HDFS PATH ### --rw-r--r-- 3 ### USER ### ### GROUP ### 613 ### HDFS DATE ### hdfs://### HDFS PATH ### +-rw-r--r-- 3 ### USER ### ### GROUP ### 614 ### HDFS DATE ### hdfs://### HDFS PATH ### +-rw-r--r-- 3 ### USER ### ### GROUP ### 614 ### HDFS DATE ### hdfs://### HDFS PATH ### -rw-r--r-- 3 ### USER ### ### GROUP ### 614 ### HDFS DATE ### hdfs://### HDFS PATH ### -rw-r--r-- 3 ### USER ### ### GROUP ### 614 ### HDFS DATE ### hdfs://### HDFS PATH ### Found 4 items diff --git a/ql/src/test/results/clientpositive/spark/spark_vectorized_dynamic_partition_pruning.q.out b/ql/src/test/results/clientpositive/spark/spark_vectorized_dynamic_partition_pruning.q.out index 634de07..ea03c0b 100644 --- a/ql/src/test/results/clientpositive/spark/spark_vectorized_dynamic_partition_pruning.q.out +++ b/ql/src/test/results/clientpositive/spark/spark_vectorized_dynamic_partition_pruning.q.out @@ -329,8 +329,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -433,8 +433,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -636,8 +636,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -814,8 +814,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -920,8 +920,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1125,8 +1125,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1294,8 +1294,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1407,8 +1407,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1576,8 +1576,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1682,8 +1682,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1852,8 +1852,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1915,8 +1915,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2020,8 +2020,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2071,8 +2071,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2302,8 +2302,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2353,8 +2353,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - 
inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2577,8 +2577,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2680,8 +2680,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2883,8 +2883,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3060,8 +3060,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3164,8 +3164,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3367,8 +3367,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3545,8 +3545,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3651,8 +3651,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3819,8 +3819,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + 
inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3924,8 +3924,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -4129,8 +4129,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -4333,8 +4333,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -4511,8 +4511,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4617,8 +4617,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -5079,8 +5079,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -5277,8 +5277,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5380,8 +5380,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -5548,8 +5548,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + 
featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5652,8 +5652,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -5801,8 +5801,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5860,8 +5860,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -6053,8 +6053,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6156,8 +6156,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -6307,8 +6307,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6370,8 +6370,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6475,8 +6475,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -6526,8 +6526,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -6707,8 +6707,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -6808,8 +6808,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -8920,8 +8920,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -9163,8 +9163,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -9399,8 +9399,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -9471,8 +9471,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -9757,8 +9757,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -9998,8 +9998,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -10232,8 +10232,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat 
allNative: false usesVectorUDFAdaptor: false @@ -10466,8 +10466,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -10972,8 +10972,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -11241,8 +11241,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -11431,8 +11431,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -11568,8 +11568,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -11640,8 +11640,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -11916,8 +11916,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -12018,8 +12018,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vector_between_in.q.out b/ql/src/test/results/clientpositive/spark/vector_between_in.q.out index 9f5fa2a..8390a6a 100644 --- a/ql/src/test/results/clientpositive/spark/vector_between_in.q.out +++ b/ql/src/test/results/clientpositive/spark/vector_between_in.q.out @@ -77,8 +77,8 @@ STAGE PLANS: Map Vectorization: enabled: true 
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -180,8 +180,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -277,8 +277,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -380,8 +380,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -477,8 +477,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -570,8 +570,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -663,8 +663,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -766,8 +766,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1119,8 +1119,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1255,8 +1255,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS 
true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1391,8 +1391,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -1527,8 +1527,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true diff --git a/ql/src/test/results/clientpositive/spark/vector_cast_constant.q.out b/ql/src/test/results/clientpositive/spark/vector_cast_constant.q.out index 5932c0a..3e74a8f 100644 --- a/ql/src/test/results/clientpositive/spark/vector_cast_constant.q.out +++ b/ql/src/test/results/clientpositive/spark/vector_cast_constant.q.out @@ -169,8 +169,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vector_char_4.q.out b/ql/src/test/results/clientpositive/spark/vector_char_4.q.out index 96b829e..c6b8203 100644 --- a/ql/src/test/results/clientpositive/spark/vector_char_4.q.out +++ b/ql/src/test/results/clientpositive/spark/vector_char_4.q.out @@ -172,8 +172,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vector_count_distinct.q.out b/ql/src/test/results/clientpositive/spark/vector_count_distinct.q.out index 1444cd8..1cf0724 100644 --- a/ql/src/test/results/clientpositive/spark/vector_count_distinct.q.out +++ b/ql/src/test/results/clientpositive/spark/vector_count_distinct.q.out @@ -1287,8 +1287,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vector_data_types.q.out b/ql/src/test/results/clientpositive/spark/vector_data_types.q.out index 65f070f..fc44d8b 100644 --- a/ql/src/test/results/clientpositive/spark/vector_data_types.q.out +++ b/ql/src/test/results/clientpositive/spark/vector_data_types.q.out @@ -254,8 +254,8 @@ STAGE PLANS: Map Vectorization: enabled: true 
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -391,8 +391,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true diff --git a/ql/src/test/results/clientpositive/spark/vector_decimal_aggregate.q.out b/ql/src/test/results/clientpositive/spark/vector_decimal_aggregate.q.out index c5d0214..e3d815b 100644 --- a/ql/src/test/results/clientpositive/spark/vector_decimal_aggregate.q.out +++ b/ql/src/test/results/clientpositive/spark/vector_decimal_aggregate.q.out @@ -109,8 +109,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -287,8 +287,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vector_decimal_mapjoin.q.out b/ql/src/test/results/clientpositive/spark/vector_decimal_mapjoin.q.out index 735c4dc..188546c 100644 --- a/ql/src/test/results/clientpositive/spark/vector_decimal_mapjoin.q.out +++ b/ql/src/test/results/clientpositive/spark/vector_decimal_mapjoin.q.out @@ -128,8 +128,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -202,8 +202,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -339,6 +339,17 @@ POSTHOOK: Input: default@t2_n29 9.00 9 9.00 9 9.00 9 +PREHOOK: query: select count(*) from (select t1_n48.`dec`, t2_n29.`dec` from t1_n48 join t2_n29 on (t1_n48.`dec`=t2_n29.`dec`)) as t +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_n48 +PREHOOK: Input: default@t2_n29 +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from (select t1_n48.`dec`, t2_n29.`dec` from t1_n48 join t2_n29 on (t1_n48.`dec`=t2_n29.`dec`)) as t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_n48 +POSTHOOK: Input: default@t2_n29 +#### A masked pattern was here #### +106 PREHOOK: query: 
explain vectorization detail select t1_n48.`dec`, t1_n48.value_dec, t2_n29.`dec`, t2_n29.value_dec from t1_n48 join t2_n29 on (t1_n48.`dec`=t2_n29.`dec`) PREHOOK: type: QUERY @@ -393,8 +404,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -467,8 +478,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -604,6 +615,17 @@ POSTHOOK: Input: default@t2_n29 9.00 48.96 9 5 9.00 48.96 9 7 9.00 48.96 9 7 +PREHOOK: query: select count(*) from (select t1_n48.`dec`, t1_n48.value_dec, t2_n29.`dec`, t2_n29.value_dec from t1_n48 join t2_n29 on (t1_n48.`dec`=t2_n29.`dec`)) as t +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_n48 +PREHOOK: Input: default@t2_n29 +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from (select t1_n48.`dec`, t1_n48.value_dec, t2_n29.`dec`, t2_n29.value_dec from t1_n48 join t2_n29 on (t1_n48.`dec`=t2_n29.`dec`)) as t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_n48 +POSTHOOK: Input: default@t2_n29 +#### A masked pattern was here #### +106 PREHOOK: query: CREATE TABLE over1k_small(t tinyint, si smallint, i int, @@ -840,6 +862,17 @@ POSTHOOK: Input: default@t1_small POSTHOOK: Input: default@t2_small #### A masked pattern was here #### 89.00 89 +PREHOOK: query: select count(*) from (select t1_small.`dec`, t2_small.`dec` from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`)) as t +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_small +PREHOOK: Input: default@t2_small +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from (select t1_small.`dec`, t2_small.`dec` from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`)) as t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_small +POSTHOOK: Input: default@t2_small +#### A masked pattern was here #### +1 PREHOOK: query: explain vectorization detail select t1_small.`dec`, t1_small.value_dec, t2_small.`dec`, t2_small.value_dec from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`) PREHOOK: type: QUERY @@ -1000,6 +1033,17 @@ POSTHOOK: Input: default@t1_small POSTHOOK: Input: default@t2_small #### A masked pattern was here #### 89.00 15.09 89 15 +PREHOOK: query: select count(*) from (select t1_small.`dec`, t1_small.value_dec, t2_small.`dec`, t2_small.value_dec from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`)) as t +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_small +PREHOOK: Input: default@t2_small +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from (select t1_small.`dec`, t1_small.value_dec, t2_small.`dec`, t2_small.value_dec from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`)) as t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_small +POSTHOOK: Input: default@t2_small +#### A masked pattern was here #### +1 PREHOOK: query: explain vectorization detail select t1_small.`dec`, t2_small.`dec` from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`) PREHOOK: type: QUERY 
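The hunks above add `select count(*)` probes around each join in vector_decimal_mapjoin.q.out, pinning the expected row counts (106 for the full tables, 1 for the `*_small` variants) so a regression in the DECIMAL_64 map-join path would show up as a wrong count rather than only as a plan diff. The same parity check can be expressed programmatically; the sketch below is a hypothetical harness, not part of this patch — it assumes a reachable HiveServer2 and relies on `hive.vectorized.input.format.supports.enabled` (the toggle these q-files use) being settable per session.

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

// Hypothetical harness: run the same count(*) under both reader modes and
// assert the DECIMAL_64 path agrees with the classic HiveDecimal path.
public class Decimal64ParityCheck {
  static final String SQL =
      "select count(*) from (select t1_n48.`dec`, t2_n29.`dec` "
      + "from t1_n48 join t2_n29 on (t1_n48.`dec` = t2_n29.`dec`)) as t";

  static long countWith(Connection c, String support) throws SQLException {
    try (Statement s = c.createStatement()) {
      // Session-level toggle, mirroring "set hive.vectorized.input.format.supports.enabled=..."
      s.execute("set hive.vectorized.input.format.supports.enabled=" + support);
      try (ResultSet rs = s.executeQuery(SQL)) {
        rs.next();
        return rs.getLong(1);
      }
    }
  }

  public static void main(String[] args) throws Exception {
    try (Connection c =
        DriverManager.getConnection("jdbc:hive2://localhost:10000/default")) {
      long fast = countWith(c, "decimal_64"); // Decimal64ColumnVector reader path
      long slow = countWith(c, "none");       // fallback HiveDecimal reader path
      if (fast != slow) {
        throw new AssertionError("DECIMAL_64 join returned " + fast
            + ", expected " + slow);
      }
    }
  }
}
```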
@@ -1162,6 +1206,17 @@ POSTHOOK: Input: default@t1_small POSTHOOK: Input: default@t2_small #### A masked pattern was here #### 89.00 89 +PREHOOK: query: select count(*) from (select t1_small.`dec`, t2_small.`dec` from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`)) as t +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_small +PREHOOK: Input: default@t2_small +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from (select t1_small.`dec`, t2_small.`dec` from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`)) as t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_small +POSTHOOK: Input: default@t2_small +#### A masked pattern was here #### +1 PREHOOK: query: explain vectorization detail select t1_small.`dec`, t1_small.value_dec, t2_small.`dec`, t2_small.value_dec from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`) PREHOOK: type: QUERY @@ -1324,3 +1379,14 @@ POSTHOOK: Input: default@t1_small POSTHOOK: Input: default@t2_small #### A masked pattern was here #### 89.00 15.09 89 15 +PREHOOK: query: select count(*) from (select t1_small.`dec`, t1_small.value_dec, t2_small.`dec`, t2_small.value_dec from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`)) as t +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_small +PREHOOK: Input: default@t2_small +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from (select t1_small.`dec`, t1_small.value_dec, t2_small.`dec`, t2_small.value_dec from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`)) as t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_small +POSTHOOK: Input: default@t2_small +#### A masked pattern was here #### +1 diff --git a/ql/src/test/results/clientpositive/spark/vector_distinct_2.q.out b/ql/src/test/results/clientpositive/spark/vector_distinct_2.q.out index f9d0272..a40484f 100644 --- a/ql/src/test/results/clientpositive/spark/vector_distinct_2.q.out +++ b/ql/src/test/results/clientpositive/spark/vector_distinct_2.q.out @@ -162,8 +162,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vector_elt.q.out b/ql/src/test/results/clientpositive/spark/vector_elt.q.out index b938d8c..db00391 100644 --- a/ql/src/test/results/clientpositive/spark/vector_elt.q.out +++ b/ql/src/test/results/clientpositive/spark/vector_elt.q.out @@ -62,8 +62,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -172,8 +172,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vector_groupby_3.q.out 
b/ql/src/test/results/clientpositive/spark/vector_groupby_3.q.out index 02c7c50..bbce14c 100644 --- a/ql/src/test/results/clientpositive/spark/vector_groupby_3.q.out +++ b/ql/src/test/results/clientpositive/spark/vector_groupby_3.q.out @@ -165,8 +165,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vector_inner_join.q.out b/ql/src/test/results/clientpositive/spark/vector_inner_join.q.out index 168aa77..fecc962 100644 --- a/ql/src/test/results/clientpositive/spark/vector_inner_join.q.out +++ b/ql/src/test/results/clientpositive/spark/vector_inner_join.q.out @@ -86,8 +86,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -168,8 +168,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -266,8 +266,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -340,8 +340,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -462,8 +462,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -544,8 +544,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -630,8 +630,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -705,8 +705,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -791,8 +791,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -875,8 +875,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -961,8 +961,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1044,8 +1044,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1130,8 +1130,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1213,8 +1213,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1299,8 +1299,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1382,8 +1382,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false 
usesVectorUDFAdaptor: false @@ -1468,8 +1468,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1551,8 +1551,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vector_left_outer_join.q.out b/ql/src/test/results/clientpositive/spark/vector_left_outer_join.q.out index 858edfa..9f8dea3 100644 --- a/ql/src/test/results/clientpositive/spark/vector_left_outer_join.q.out +++ b/ql/src/test/results/clientpositive/spark/vector_left_outer_join.q.out @@ -47,8 +47,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -72,8 +72,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -128,8 +128,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vector_orderby_5.q.out b/ql/src/test/results/clientpositive/spark/vector_orderby_5.q.out index 395939a..c35156e 100644 --- a/ql/src/test/results/clientpositive/spark/vector_orderby_5.q.out +++ b/ql/src/test/results/clientpositive/spark/vector_orderby_5.q.out @@ -166,8 +166,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vector_outer_join0.q.out b/ql/src/test/results/clientpositive/spark/vector_outer_join0.q.out index bc9d102..c36c9ec 100644 --- a/ql/src/test/results/clientpositive/spark/vector_outer_join0.q.out +++ b/ql/src/test/results/clientpositive/spark/vector_outer_join0.q.out @@ -109,8 +109,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: 
[DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -178,8 +178,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -262,8 +262,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -331,8 +331,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vector_outer_join1.q.out b/ql/src/test/results/clientpositive/spark/vector_outer_join1.q.out index 16c1650..ecac4da 100644 --- a/ql/src/test/results/clientpositive/spark/vector_outer_join1.q.out +++ b/ql/src/test/results/clientpositive/spark/vector_outer_join1.q.out @@ -269,8 +269,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -338,8 +338,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -445,8 +445,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -512,8 +512,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -711,8 +711,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true 
usesVectorUDFAdaptor: false @@ -752,8 +752,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -851,8 +851,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vector_outer_join2.q.out b/ql/src/test/results/clientpositive/spark/vector_outer_join2.q.out index bd8e1a2..92ad63e 100644 --- a/ql/src/test/results/clientpositive/spark/vector_outer_join2.q.out +++ b/ql/src/test/results/clientpositive/spark/vector_outer_join2.q.out @@ -285,8 +285,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -326,8 +326,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -425,8 +425,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vector_string_concat.q.out b/ql/src/test/results/clientpositive/spark/vector_string_concat.q.out index 4504a74..cee7995 100644 --- a/ql/src/test/results/clientpositive/spark/vector_string_concat.q.out +++ b/ql/src/test/results/clientpositive/spark/vector_string_concat.q.out @@ -154,8 +154,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -376,8 +376,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vector_varchar_4.q.out b/ql/src/test/results/clientpositive/spark/vector_varchar_4.q.out index c6e17ab..2e5cb46 
100644 --- a/ql/src/test/results/clientpositive/spark/vector_varchar_4.q.out +++ b/ql/src/test/results/clientpositive/spark/vector_varchar_4.q.out @@ -172,8 +172,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vectorization_0.q.out b/ql/src/test/results/clientpositive/spark/vectorization_0.q.out index ee74d1f..c906d0a 100644 --- a/ql/src/test/results/clientpositive/spark/vectorization_0.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorization_0.q.out @@ -72,8 +72,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -251,8 +251,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -419,8 +419,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -578,8 +578,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -757,8 +757,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -925,8 +925,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1084,8 +1084,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ 
-1263,8 +1263,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1431,8 +1431,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1636,8 +1636,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vectorization_1.q.out b/ql/src/test/results/clientpositive/spark/vectorization_1.q.out index 5303fe4..13cc510 100644 --- a/ql/src/test/results/clientpositive/spark/vectorization_1.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorization_1.q.out @@ -105,8 +105,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vectorization_10.q.out b/ql/src/test/results/clientpositive/spark/vectorization_10.q.out index a2e5786..23a04b8 100644 --- a/ql/src/test/results/clientpositive/spark/vectorization_10.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorization_10.q.out @@ -97,8 +97,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vectorization_11.q.out b/ql/src/test/results/clientpositive/spark/vectorization_11.q.out index dcf7c3e..b47433d 100644 --- a/ql/src/test/results/clientpositive/spark/vectorization_11.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorization_11.q.out @@ -79,8 +79,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vectorization_12.q.out b/ql/src/test/results/clientpositive/spark/vectorization_12.q.out index 1c302e1..9592735 100644 --- a/ql/src/test/results/clientpositive/spark/vectorization_12.q.out +++ 
b/ql/src/test/results/clientpositive/spark/vectorization_12.q.out @@ -132,8 +132,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vectorization_13.q.out b/ql/src/test/results/clientpositive/spark/vectorization_13.q.out index 0f1b228..241098e 100644 --- a/ql/src/test/results/clientpositive/spark/vectorization_13.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorization_13.q.out @@ -134,8 +134,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -484,8 +484,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vectorization_14.q.out b/ql/src/test/results/clientpositive/spark/vectorization_14.q.out index 5266764..b2b7707a 100644 --- a/ql/src/test/results/clientpositive/spark/vectorization_14.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorization_14.q.out @@ -134,8 +134,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vectorization_15.q.out b/ql/src/test/results/clientpositive/spark/vectorization_15.q.out index 5d51c87..835ab5f 100644 --- a/ql/src/test/results/clientpositive/spark/vectorization_15.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorization_15.q.out @@ -130,8 +130,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vectorization_16.q.out b/ql/src/test/results/clientpositive/spark/vectorization_16.q.out index 56bbb89..ebd3ddf 100644 --- a/ql/src/test/results/clientpositive/spark/vectorization_16.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorization_16.q.out @@ -107,8 +107,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vectorization_17.q.out b/ql/src/test/results/clientpositive/spark/vectorization_17.q.out index 1877fb3..5091187 100644 --- a/ql/src/test/results/clientpositive/spark/vectorization_17.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorization_17.q.out @@ -100,8 +100,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vectorization_2.q.out b/ql/src/test/results/clientpositive/spark/vectorization_2.q.out index 1af403a..43e5074 100644 --- a/ql/src/test/results/clientpositive/spark/vectorization_2.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorization_2.q.out @@ -109,8 +109,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vectorization_3.q.out b/ql/src/test/results/clientpositive/spark/vectorization_3.q.out index 51d04cc..ec6de13 100644 --- a/ql/src/test/results/clientpositive/spark/vectorization_3.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorization_3.q.out @@ -114,8 +114,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vectorization_4.q.out b/ql/src/test/results/clientpositive/spark/vectorization_4.q.out index 07d30cf..3c5084e 100644 --- a/ql/src/test/results/clientpositive/spark/vectorization_4.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorization_4.q.out @@ -109,8 +109,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vectorization_5.q.out b/ql/src/test/results/clientpositive/spark/vectorization_5.q.out index 0fb430e..e3497af 100644 --- a/ql/src/test/results/clientpositive/spark/vectorization_5.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorization_5.q.out @@ -102,8 +102,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false 
usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vectorization_6.q.out b/ql/src/test/results/clientpositive/spark/vectorization_6.q.out index bdb014c..600926c 100644 --- a/ql/src/test/results/clientpositive/spark/vectorization_6.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorization_6.q.out @@ -91,8 +91,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vectorization_9.q.out b/ql/src/test/results/clientpositive/spark/vectorization_9.q.out index 56bbb89..ebd3ddf 100644 --- a/ql/src/test/results/clientpositive/spark/vectorization_9.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorization_9.q.out @@ -107,8 +107,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vectorization_decimal_date.q.out b/ql/src/test/results/clientpositive/spark/vectorization_decimal_date.q.out index a0631c3..9209d48 100644 --- a/ql/src/test/results/clientpositive/spark/vectorization_decimal_date.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorization_decimal_date.q.out @@ -71,8 +71,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vectorization_div0.q.out b/ql/src/test/results/clientpositive/spark/vectorization_div0.q.out index e6a0b5c..5a73d09 100644 --- a/ql/src/test/results/clientpositive/spark/vectorization_div0.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorization_div0.q.out @@ -52,8 +52,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -269,8 +269,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -486,8 +486,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -703,8 +703,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vectorization_nested_udf.q.out b/ql/src/test/results/clientpositive/spark/vectorization_nested_udf.q.out index 7f81581..2871c1a 100644 --- a/ql/src/test/results/clientpositive/spark/vectorization_nested_udf.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorization_nested_udf.q.out @@ -62,8 +62,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vectorization_part_project.q.out b/ql/src/test/results/clientpositive/spark/vectorization_part_project.q.out index 53e8bbf..48165bb 100644 --- a/ql/src/test/results/clientpositive/spark/vectorization_part_project.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorization_part_project.q.out @@ -83,8 +83,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vectorization_pushdown.q.out b/ql/src/test/results/clientpositive/spark/vectorization_pushdown.q.out index 9001a2a..c1ad2f9 100644 --- a/ql/src/test/results/clientpositive/spark/vectorization_pushdown.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorization_pushdown.q.out @@ -44,8 +44,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out b/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out index 8dbd679..9879e22 100644 --- a/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out @@ -134,8 +134,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -394,8 +394,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - 
inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -646,8 +646,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -877,8 +877,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -1105,8 +1105,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -1400,8 +1400,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -1645,8 +1645,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -1947,8 +1947,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -2205,8 +2205,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -2480,8 +2480,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -2799,8 +2799,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -3199,8 +3199,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -3432,8 +3432,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -3545,8 +3545,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -3730,8 +3730,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -3843,8 +3843,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -3956,8 +3956,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -4069,8 +4069,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -4182,8 +4182,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -4295,8 +4295,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/spark/vectorized_case.q.out b/ql/src/test/results/clientpositive/spark/vectorized_case.q.out
index b7232a8..4408be3 100644
--- a/ql/src/test/results/clientpositive/spark/vectorized_case.q.out
+++ b/ql/src/test/results/clientpositive/spark/vectorized_case.q.out
@@ -85,8 +85,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -233,8 +233,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -322,8 +322,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -464,8 +464,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -580,7 +580,7 @@ STAGE PLANS:
Statistics: Num rows: 3 Data size: 672 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:member:decimal(10,0), 1:attr:decimal(10,0), 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:member:decimal(10,0)/DECIMAL_64, 1:attr:decimal(10,0)/DECIMAL_64, 2:ROW__ID:struct]
Select Operator
expressions: CASE WHEN ((member = 1)) THEN ((attr + 1)) ELSE ((attr + 2)) END (type: decimal(11,0))
outputColumnNames: _col0
@@ -588,7 +588,7 @@ STAGE PLANS:
className: VectorSelectOperator
native: true
projectedOutputColumnNums: [6]
- selectExpressions: IfExprCondExprCondExpr(col 3:boolean, col 4:decimal(11,0)col 5:decimal(11,0))(children: VectorUDFAdaptor((member = 1)) -> 3:boolean, DecimalColAddDecimalScalar(col 1:decimal(10,0), val 1) -> 4:decimal(11,0), DecimalColAddDecimalScalar(col 1:decimal(10,0), val 2) -> 5:decimal(11,0)) -> 6:decimal(11,0)
+ selectExpressions: IfExprCondExprCondExpr(col 3:boolean, col 7:decimal(11,0)col 8:decimal(11,0))(children: VectorUDFAdaptor((member = 1)) -> 3:boolean, ConvertDecimal64ToDecimal(col 4:decimal(11,0)/DECIMAL_64)(children: Decimal64ColAddDecimal64Scalar(col 1:decimal(10,0)/DECIMAL_64, decimal64Val 1, decimalVal 1) -> 4:decimal(11,0)/DECIMAL_64) -> 7:decimal(11,0), ConvertDecimal64ToDecimal(col 5:decimal(11,0)/DECIMAL_64)(children: Decimal64ColAddDecimal64Scalar(col 1:decimal(10,0)/DECIMAL_64, decimal64Val 2, decimalVal 2) -> 5:decimal(11,0)/DECIMAL_64) -> 8:decimal(11,0)) -> 6:decimal(11,0)
Statistics: Num rows: 3 Data size: 672 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
@@ -604,8 +604,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: true
@@ -613,9 +613,9 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0, 1]
- dataColumns: member:decimal(10,0), attr:decimal(10,0)
+ dataColumns: member:decimal(10,0)/DECIMAL_64, attr:decimal(10,0)/DECIMAL_64
partitionColumnCount: 0
- scratchColumnTypeNames: [bigint, decimal(11,0), decimal(11,0), decimal(11,0)]
+ scratchColumnTypeNames: [bigint, decimal(11,0)/DECIMAL_64, decimal(11,0)/DECIMAL_64, decimal(11,0), decimal(11,0), decimal(11,0)]
Stage: Stage-0
Fetch Operator
@@ -660,15 +660,15 @@ STAGE PLANS:
Statistics: Num rows: 3 Data size: 672 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:member:decimal(10,0), 1:attr:decimal(10,0), 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:member:decimal(10,0)/DECIMAL_64, 1:attr:decimal(10,0)/DECIMAL_64, 2:ROW__ID:struct]
Select Operator
expressions: CASE WHEN ((member = 1)) THEN (1) ELSE ((attr + 2)) END (type: decimal(11,0))
outputColumnNames: _col0
Select Vectorization:
className: VectorSelectOperator
native: true
- projectedOutputColumnNums: [6]
- selectExpressions: IfExprColumnCondExpr(col 3:boolean, col 4:decimal(1,0)col 5:decimal(11,0))(children: VectorUDFAdaptor((member = 1)) -> 3:boolean, ConstantVectorExpression(val 1) -> 4:decimal(1,0), DecimalColAddDecimalScalar(col 1:decimal(10,0), val 2) -> 5:decimal(11,0)) -> 6:decimal(11,0)
+ projectedOutputColumnNums: [8]
+ selectExpressions: VectorUDFAdaptor(CASE WHEN ((member = 1)) THEN (1) ELSE ((attr + 2)) END)(children: VectorUDFAdaptor((member = 1)) -> 6:boolean, Decimal64ColAddDecimal64Scalar(col 1:decimal(10,0)/DECIMAL_64, decimal64Val 2, decimalVal 2) -> 7:decimal(11,0)/DECIMAL_64) -> 8:decimal(11,0)
Statistics: Num rows: 3 Data size: 672 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
@@ -684,8 +684,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: true
@@ -693,9 +693,9 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0, 1]
- dataColumns: member:decimal(10,0), attr:decimal(10,0)
+ dataColumns: member:decimal(10,0)/DECIMAL_64, attr:decimal(10,0)/DECIMAL_64
partitionColumnCount: 0
- scratchColumnTypeNames: [bigint, decimal(1,0), decimal(11,0), decimal(11,0)]
+ scratchColumnTypeNames: [bigint, decimal(1,0), decimal(11,0)/DECIMAL_64, bigint, decimal(11,0)/DECIMAL_64, decimal(11,0)]
Stage: Stage-0
Fetch Operator
@@ -740,15 +740,15 @@ STAGE PLANS:
Statistics: Num rows: 3 Data size: 672 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
- vectorizationSchemaColumns: [0:member:decimal(10,0), 1:attr:decimal(10,0), 2:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:member:decimal(10,0)/DECIMAL_64, 1:attr:decimal(10,0)/DECIMAL_64, 2:ROW__ID:struct]
Select Operator
expressions: CASE WHEN ((member = 1)) THEN ((attr + 1)) ELSE (2) END (type: decimal(11,0))
outputColumnNames: _col0
Select Vectorization:
className: VectorSelectOperator
native: true
- projectedOutputColumnNums: [6]
- selectExpressions: IfExprCondExprColumn(col 3:boolean, col 4:decimal(11,0), col 5:decimal(1,0))(children: VectorUDFAdaptor((member = 1)) -> 3:boolean, DecimalColAddDecimalScalar(col 1:decimal(10,0), val 1) -> 4:decimal(11,0), ConstantVectorExpression(val 2) -> 5:decimal(1,0)) -> 6:decimal(11,0)
+ projectedOutputColumnNums: [8]
+ selectExpressions: VectorUDFAdaptor(CASE WHEN ((member = 1)) THEN ((attr + 1)) ELSE (2) END)(children: VectorUDFAdaptor((member = 1)) -> 6:boolean, Decimal64ColAddDecimal64Scalar(col 1:decimal(10,0)/DECIMAL_64, decimal64Val 1, decimalVal 1) -> 7:decimal(11,0)/DECIMAL_64) -> 8:decimal(11,0)
Statistics: Num rows: 3 Data size: 672 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
@@ -764,8 +764,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: true
@@ -773,9 +773,9 @@ STAGE PLANS:
rowBatchContext:
dataColumnCount: 2
includeColumns: [0, 1]
- dataColumns: member:decimal(10,0), attr:decimal(10,0)
+ dataColumns: member:decimal(10,0)/DECIMAL_64, attr:decimal(10,0)/DECIMAL_64
partitionColumnCount: 0
- scratchColumnTypeNames: [bigint, decimal(11,0), decimal(1,0), decimal(11,0)]
+ scratchColumnTypeNames: [bigint, decimal(11,0)/DECIMAL_64, decimal(1,0), bigint, decimal(11,0)/DECIMAL_64, decimal(11,0)]
Stage: Stage-0
Fetch Operator
@@ -862,8 +862,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -942,8 +942,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -1022,8 +1022,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/spark/vectorized_mapjoin.q.out b/ql/src/test/results/clientpositive/spark/vectorized_mapjoin.q.out
index 4a80422..c17290a 100644
--- a/ql/src/test/results/clientpositive/spark/vectorized_mapjoin.q.out
+++ b/ql/src/test/results/clientpositive/spark/vectorized_mapjoin.q.out
@@ -53,8 +53,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -137,8 +137,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/spark/vectorized_math_funcs.q.out b/ql/src/test/results/clientpositive/spark/vectorized_math_funcs.q.out
index 4fbdd2e..024ff40 100644
--- a/ql/src/test/results/clientpositive/spark/vectorized_math_funcs.q.out
+++ b/ql/src/test/results/clientpositive/spark/vectorized_math_funcs.q.out
@@ -152,8 +152,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: true
diff --git a/ql/src/test/results/clientpositive/spark/vectorized_nested_mapjoin.q.out b/ql/src/test/results/clientpositive/spark/vectorized_nested_mapjoin.q.out
index 3abce03..a035d0d 100644
--- a/ql/src/test/results/clientpositive/spark/vectorized_nested_mapjoin.q.out
+++ b/ql/src/test/results/clientpositive/spark/vectorized_nested_mapjoin.q.out
@@ -36,8 +36,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -64,8 +64,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -128,8 +128,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/spark/vectorized_ptf.q.out b/ql/src/test/results/clientpositive/spark/vectorized_ptf.q.out
index 278164b..121c112 100644
--- a/ql/src/test/results/clientpositive/spark/vectorized_ptf.q.out
+++ b/ql/src/test/results/clientpositive/spark/vectorized_ptf.q.out
@@ -165,8 +165,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -381,8 +381,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -423,8 +423,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -633,8 +633,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -789,8 +789,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -1004,8 +1004,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -1222,8 +1222,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -1442,8 +1442,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -1484,8 +1484,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -1663,8 +1663,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -1700,8 +1700,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -2275,8 +2275,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -2489,8 +2489,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -2772,8 +2772,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -2990,8 +2990,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -3032,8 +3032,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -3282,8 +3282,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -3517,8 +3517,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -3774,8 +3774,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -4228,8 +4228,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -4541,8 +4541,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -4849,8 +4849,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -5167,8 +5167,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -5494,8 +5494,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -5791,8 +5791,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/spark/vectorized_shufflejoin.q.out b/ql/src/test/results/clientpositive/spark/vectorized_shufflejoin.q.out
index 3cbcf2c..86ea785 100644
--- a/ql/src/test/results/clientpositive/spark/vectorized_shufflejoin.q.out
+++ b/ql/src/test/results/clientpositive/spark/vectorized_shufflejoin.q.out
@@ -58,8 +58,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -99,8 +99,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/spark/vectorized_string_funcs.q.out b/ql/src/test/results/clientpositive/spark/vectorized_string_funcs.q.out
index 2a229d2..39e01ad 100644
--- a/ql/src/test/results/clientpositive/spark/vectorized_string_funcs.q.out
+++ b/ql/src/test/results/clientpositive/spark/vectorized_string_funcs.q.out
@@ -78,8 +78,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out b/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out
index 18926cb..680fdd4 100644
--- a/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out
+++ b/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out
@@ -283,8 +283,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -491,8 +491,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -683,8 +683,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -875,8 +875,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -1016,8 +1016,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -1141,8 +1141,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -1284,8 +1284,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out b/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out
index 3c9cf03..d10faeb 100644
--- a/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out
+++ b/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out
@@ -370,10 +370,10 @@ POSTHOOK: Lineage: over10k_orc_bucketed_n0.si SIMPLE [(over10k_n9)over10k_n9.Fie
POSTHOOK: Lineage: over10k_orc_bucketed_n0.t SIMPLE [(over10k_n9)over10k_n9.FieldSchema(name:t, type:tinyint, comment:null), ]
POSTHOOK: Lineage: over10k_orc_bucketed_n0.ts SIMPLE [(over10k_n9)over10k_n9.FieldSchema(name:ts, type:timestamp, comment:null), ]
Found 4 items
--rw-rw-rw- 3 ### USER ### ### GROUP ### 8997 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw- 3 ### USER ### ### GROUP ### 7773 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw- 3 ### USER ### ### GROUP ### 7358 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw- 3 ### USER ### ### GROUP ### 7261 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 8914 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 7709 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 7284 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 7190 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: insert into over10k_orc_bucketed_n0 select * from over10k_n9
PREHOOK: type: QUERY
PREHOOK: Input: default@over10k_n9
@@ -394,14 +394,14 @@ POSTHOOK: Lineage: over10k_orc_bucketed_n0.si SIMPLE [(over10k_n9)over10k_n9.Fie
POSTHOOK: Lineage: over10k_orc_bucketed_n0.t SIMPLE [(over10k_n9)over10k_n9.FieldSchema(name:t, type:tinyint, comment:null), ]
POSTHOOK: Lineage: over10k_orc_bucketed_n0.ts SIMPLE [(over10k_n9)over10k_n9.FieldSchema(name:ts, type:timestamp, comment:null), ]
Found 8 items
--rw-rw-rw- 3 ### USER ### ### GROUP ### 8997 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw- 3 ### USER ### ### GROUP ### 8997 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw- 3 ### USER ### ### GROUP ### 7773 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw- 3 ### USER ### ### GROUP ### 7773 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw- 3 ### USER ### ### GROUP ### 7358 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw- 3 ### USER ### ### GROUP ### 7358 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw- 3 ### USER ### ### GROUP ### 7261 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw- 3 ### USER ### ### GROUP ### 7261 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 8914 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 8914 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 7709 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 7709 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 7284 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 7284 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 7190 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 7190 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: select distinct 7 as seven, INPUT__FILE__NAME from over10k_orc_bucketed_n0
PREHOOK: type: QUERY
PREHOOK: Input: default@over10k_orc_bucketed_n0
@@ -680,22 +680,22 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: over10k_orc_bucketed_n0
- Statistics: Num rows: 1247 Data size: 713720 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1237 Data size: 707880 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: ROW__ID (type: struct)
outputColumnNames: ROW__ID
- Statistics: Num rows: 1247 Data size: 713720 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1237 Data size: 707880 Basic stats: COMPLETE Column stats: COMPLETE
Group By Operator
aggregations: count()
keys: ROW__ID (type: struct)
mode: hash
outputColumnNames: _col0, _col1
- Statistics: Num rows: 623 Data size: 52332 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 618 Data size: 51912 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col0 (type: struct)
sort order: +
Map-reduce partition columns: _col0 (type: struct)
- Statistics: Num rows: 623 Data size: 52332 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 618 Data size: 51912 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col1 (type: bigint)
Reducer 2
Reduce Operator Tree:
@@ -704,13 +704,13 @@ STAGE PLANS:
keys: KEY._col0 (type: struct)
mode: mergepartial
outputColumnNames: _col0, _col1
- Statistics: Num rows: 623 Data size: 52332 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 618 Data size: 51912 Basic stats: COMPLETE Column stats: COMPLETE
Filter Operator
predicate: (_col1 > 1L) (type: boolean)
- Statistics: Num rows: 207 Data size: 17388 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 206 Data size: 17304 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
compressed: false
- Statistics: Num rows: 207 Data size: 17388 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 206 Data size: 17304 Basic stats: COMPLETE Column stats: COMPLETE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out b/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
index 42bad01..52feecf 100644
--- a/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
+++ b/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
@@ -623,11 +623,11 @@ Stage-0
TableScan [TS_0] (rows=500/500 width=178)
default@src,src,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
-PREHOOK: query: create table orc_merge5_n1 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc
+PREHOOK: query: create table orc_merge5_n1 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@orc_merge5_n1
-POSTHOOK: query: create table orc_merge5_n1 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc
+POSTHOOK: query: create table orc_merge5_n1 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@orc_merge5_n1
diff --git a/ql/src/test/results/clientpositive/tez/vector_non_string_partition.q.out b/ql/src/test/results/clientpositive/tez/vector_non_string_partition.q.out
index fa72556..130029c 100644
--- a/ql/src/test/results/clientpositive/tez/vector_non_string_partition.q.out
+++ b/ql/src/test/results/clientpositive/tez/vector_non_string_partition.q.out
@@ -83,8 +83,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
@@ -204,8 +204,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: true
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/vector_aggregate_9.q.out b/ql/src/test/results/clientpositive/vector_aggregate_9.q.out
index fe7fbfd..9487881 100644
--- a/ql/src/test/results/clientpositive/vector_aggregate_9.q.out
+++ b/ql/src/test/results/clientpositive/vector_aggregate_9.q.out
@@ -158,8 +158,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -264,8 +264,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -370,8 +370,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/vector_aggregate_without_gby.q.out b/ql/src/test/results/clientpositive/vector_aggregate_without_gby.q.out
index 4d2b0dc..19641ee 100644
--- a/ql/src/test/results/clientpositive/vector_aggregate_without_gby.q.out
+++ b/ql/src/test/results/clientpositive/vector_aggregate_without_gby.q.out
@@ -97,8 +97,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/vector_between_columns.q.out b/ql/src/test/results/clientpositive/vector_between_columns.q.out
index 7967df1..f5e095b 100644
--- a/ql/src/test/results/clientpositive/vector_between_columns.q.out
+++ b/ql/src/test/results/clientpositive/vector_between_columns.q.out
@@ -173,8 +173,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: true
@@ -333,8 +333,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: true
diff --git a/ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out b/ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out
index b66c0b0..ac9ef5c 100644
--- a/ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out
+++ b/ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out
@@ -170,7 +170,7 @@ STAGE PLANS:
1 _col10 (type: binary)
Map Join Vectorization:
bigTableKeyExpressions: col 10:binary
- bigTableValueExpressions: col 0:tinyint, col 1:smallint, col 2:int, col 3:bigint, col 4:float, col 5:double, col 6:boolean, col 7:string, col 8:timestamp, col 9:decimal(4,2), col 10:binary
+ bigTableValueExpressions: col 0:tinyint, col 1:smallint, col 2:int, col 3:bigint, col 4:float, col 5:double, col 6:boolean, col 7:string, col 8:timestamp, ConvertDecimal64ToDecimal(col 9:decimal(4,2)/DECIMAL_64) -> 12:decimal(4,2), col 10:binary
className: VectorMapJoinOperator
native: false
nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
@@ -211,8 +211,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: true
@@ -364,8 +364,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
@@ -582,8 +582,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
allNative: false
usesVectorUDFAdaptor: false
diff --git a/ql/src/test/results/clientpositive/vector_bround.q.out b/ql/src/test/results/clientpositive/vector_bround.q.out
index 644902b..68086c2 100644
--- a/ql/src/test/results/clientpositive/vector_bround.q.out
+++ b/ql/src/test/results/clientpositive/vector_bround.q.out
@@ -91,8 +91,8 @@ STAGE PLANS:
Map Vectorization:
enabled: true
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
inputFileFormats:
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_case_when_1.q.out b/ql/src/test/results/clientpositive/vector_case_when_1.q.out index 66807ac..59d8133 100644 --- a/ql/src/test/results/clientpositive/vector_case_when_1.q.out +++ b/ql/src/test/results/clientpositive/vector_case_when_1.q.out @@ -140,7 +140,6 @@ SELECT IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11, IF(L_SUPPKEY % 500 > 100, DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12 FROM lineitem_test -ORDER BY Quantity PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL SELECT @@ -182,7 +181,6 @@ SELECT IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11, IF(L_SUPPKEY % 500 > 100, DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12 FROM lineitem_test -ORDER BY Quantity POSTHOOK: type: QUERY Explain PLAN VECTORIZATION: @@ -204,33 +202,19 @@ STAGE PLANS: expressions: l_quantity (type: int), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE ('Huge number') END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE (null) END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN (null) ELSE (null) END (type: string), if((l_shipmode = 'SHIP '), date_add(l_shipdate, 10), date_add(l_shipdate, 5)) (type: date), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0) END (type: double), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0.0D) END (type: double), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), null, l_tax) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, null) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(12,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(12,2)), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(10,2)), if((l_partkey > 30), CAST( l_receiptdate AS TIMESTAMP), CAST( l_commitdate AS TIMESTAMP)) (type: timestamp), if((l_suppkey > 10000), datediff(l_receiptdate, l_commitdate), null) (type: int), if((l_suppkey > 10000), null, datediff(l_receiptdate, l_commitdate)) (type: int), if(((l_suppkey % 500) > 100), DATE'2009-01-01', DATE'2009-12-31') (type: date) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16 Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + + File Output Operator + compressed: false Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE - value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: date), _col5 (type: double), _col6 (type: double), _col7 (type: decimal(10,2)), _col8 (type: decimal(10,2)), _col9 (type: decimal(12,2)), _col10 (type: decimal(12,2)), 
_col11 (type: decimal(10,2)), _col12 (type: decimal(10,2)), _col13 (type: timestamp), _col14 (type: int), _col15 (type: int), _col16 (type: date) + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat notVectorizedReason: SELECT operator: Unexpected hive type name void vectorized: false - Reduce Vectorization: - enabled: false - enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true - enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false - Reduce Operator Tree: - Select Operator - expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: date), VALUE._col4 (type: double), VALUE._col5 (type: double), VALUE._col6 (type: decimal(10,2)), VALUE._col7 (type: decimal(10,2)), VALUE._col8 (type: decimal(12,2)), VALUE._col9 (type: decimal(12,2)), VALUE._col10 (type: decimal(10,2)), VALUE._col11 (type: decimal(10,2)), VALUE._col12 (type: timestamp), VALUE._col13 (type: int), VALUE._col14 (type: int), VALUE._col15 (type: date) - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16 - Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-0 Fetch Operator @@ -277,7 +261,6 @@ PREHOOK: query: SELECT IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11, IF(L_SUPPKEY % 500 > 100, DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12 FROM lineitem_test -ORDER BY Quantity PREHOOK: type: QUERY PREHOOK: Input: default@lineitem_test #### A masked pattern was here #### @@ -320,112 +303,111 @@ POSTHOOK: query: SELECT IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11, IF(L_SUPPKEY % 500 > 100, DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12 FROM lineitem_test -ORDER BY Quantity POSTHOOK: type: QUERY POSTHOOK: Input: default@lineitem_test #### A masked pattern was here #### quantity quantity_description quantity_description_2 quantity_description_3 expected_date field_1 field_2 field_3 field_4 field_5 field_6 field_7 field_8 field_9 field_10 field_11 field_12 -NULL Huge number NULL NULL NULL 0.0 0.0 NULL NULL NULL 0.00 NULL 0.00 NULL NULL NULL 2009-12-31 -1 Single Single Single 1994-12-06 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-12-15 00:00:00 NULL 3 2009-01-01 1 Single Single Single 1994-01-31 0.0 0.0 0.05 0.05 0.05 0.05 0.05 0.05 1994-01-28 00:00:00 NULL -36 2009-01-01 -2 Two Two Two 1995-08-12 2011.3912000000003 2011.3912000000003 NULL NULL 0.00 0.00 0.00 0.00 1995-08-23 00:00:00 NULL -45 2009-01-01 -2 Two Two Two 1993-12-09 0.0 0.0 0.06 NULL 0.06 0.00 0.06 0.00 1994-01-01 00:00:00 NULL -6 2009-01-01 -3 Some Some Some 1994-06-11 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1994-06-15 00:00:00 NULL -42 2009-12-31 -3 Some Some 
Some 1998-06-02 5137.6143 5137.6143 0.07 NULL 0.07 0.00 0.07 0.00 1998-06-02 00:00:00 NULL 60 2009-01-01 -3 Some Some Some 1998-07-09 2778.921 2778.921 0.02 NULL 0.02 0.00 0.02 0.00 1998-07-21 00:00:00 NULL 46 2009-12-31 -4 Some Some Some 1995-08-09 5990.4936 5990.4936 0.03 NULL 0.03 0.00 0.03 0.00 1995-09-03 00:00:00 NULL -28 2009-01-01 -4 Some Some Some 1997-04-27 5669.7732000000005 5669.7732000000005 0.04 NULL 0.04 0.00 0.04 0.00 1997-04-20 00:00:00 NULL 79 2009-01-01 -5 Some Some Some 1997-02-25 8116.96 8116.96 NULL NULL 0.00 0.00 0.00 0.00 1997-02-21 00:00:00 NULL 9 2009-01-01 -5 Some Some Some 1996-02-15 6217.103999999999 6217.103999999999 0.02 NULL 0.02 0.00 0.02 0.00 1996-02-13 00:00:00 NULL -42 2009-01-01 -5 Some Some Some 1993-12-14 0.0 0.0 0.03 0.03 0.03 0.03 0.03 0.03 1993-12-23 00:00:00 NULL -2 2009-01-01 -6 Some Some Some 1998-11-04 9487.6152 9487.6152 0.06 NULL 0.06 0.00 0.06 0.00 1998-11-05 00:00:00 NULL 46 2009-12-31 -6 Some Some Some 1995-07-26 8793.2736 8793.2736 0.03 NULL 0.03 0.00 0.03 0.00 1995-07-25 00:00:00 NULL -60 2009-01-01 -7 Some Some Some 1996-01-24 12613.136199999999 12613.136199999999 0.04 NULL 0.04 0.00 0.04 0.00 1996-01-29 00:00:00 NULL 38 2009-01-01 -8 Some Some Some 1994-01-17 0.0 0.0 0.08 0.08 0.08 0.08 0.08 0.08 1994-01-14 00:00:00 NULL -44 2009-01-01 -8 Some Some Some 1996-02-03 11978.640000000001 11978.640000000001 0.02 0.02 0.02 0.02 0.02 0.02 1996-01-31 00:00:00 NULL -34 2009-01-01 -9 Some Some Some 1996-02-11 10666.6272 10666.6272 0.08 0.08 0.08 0.08 0.08 0.08 1996-02-19 00:00:00 NULL -12 2009-01-01 +1 Single Single Single 1994-12-06 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-12-15 00:00:00 NULL 3 2009-01-01 11 Many Many NULL 1994-03-22 0.0 0.0 0.05 NULL 0.05 0.00 0.05 0.00 1994-03-27 00:00:00 NULL 10 2009-01-01 12 Many Many NULL 1996-05-12 12655.998 12655.998 0.03 0.03 0.03 0.03 0.03 0.03 1996-06-03 00:00:00 NULL 82 2009-01-01 12 Many Many NULL 1997-02-01 12156.034800000001 12156.034800000001 0.05 NULL 0.05 0.00 0.05 0.00 1997-02-22 00:00:00 NULL 1 2009-01-01 +13 Many Many NULL 1993-04-06 0.0 0.0 0.02 NULL 0.02 0.00 0.02 0.00 1993-04-08 00:00:00 NULL 4 2009-01-01 13 Many Many NULL 1994-03-08 0.0 0.0 0.06 NULL 0.06 0.00 0.06 0.00 1994-03-26 00:00:00 NULL 41 2009-01-01 13 Many Many NULL 1998-10-28 17554.68 17554.68 0.07 NULL 0.07 0.00 0.07 0.00 1998-11-06 00:00:00 NULL 53 2009-01-01 -13 Many Many NULL 1993-04-06 0.0 0.0 0.02 NULL 0.02 0.00 0.02 0.00 1993-04-08 00:00:00 NULL 4 2009-01-01 14 Many Many NULL 1995-01-04 0.0 0.0 0.02 NULL 0.02 0.00 0.02 0.00 1995-01-27 00:00:00 NULL 66 2009-01-01 15 Many Many NULL 1994-11-05 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1994-11-20 00:00:00 NULL 81 2009-12-31 -17 Many Many NULL 1996-03-18 20321.500799999998 20321.500799999998 NULL NULL 0.00 0.00 0.00 0.00 1996-03-22 00:00:00 NULL 39 2009-01-01 17 Many Many NULL 1994-07-07 0.0 0.0 0.00 0.00 0.00 0.00 0.00 0.00 1994-07-03 00:00:00 NULL -4 2009-01-01 +17 Many Many NULL 1996-03-18 20321.500799999998 20321.500799999998 NULL NULL 0.00 0.00 0.00 0.00 1996-03-22 00:00:00 NULL 39 2009-01-01 19 Many Many NULL 1993-05-19 0.0 0.0 0.08 0.08 0.08 0.08 0.08 0.08 1993-05-25 00:00:00 NULL 81 2009-01-01 19 Many Many NULL 1994-02-05 0.0 0.0 0.03 0.03 0.03 0.03 0.03 0.03 1994-02-06 00:00:00 NULL -11 2009-01-01 +2 Two Two Two 1993-12-09 0.0 0.0 0.06 NULL 0.06 0.00 0.06 0.00 1994-01-01 00:00:00 NULL -6 2009-01-01 +2 Two Two Two 1995-08-12 2011.3912000000003 2011.3912000000003 NULL NULL 0.00 0.00 0.00 0.00 1995-08-23 00:00:00 NULL -45 2009-01-01 20 Many Many NULL 1998-07-02 32042.592 
32042.592 0.01 NULL 0.01 0.00 0.01 0.00 1998-07-02 00:00:00 NULL 40 2009-01-01
-21 Many Many NULL 1995-07-11 24640.0518 24640.0518 NULL NULL 0.00 0.00 0.00 0.00 1995-07-31 00:00:00 NULL 78 2009-01-01
21 Many Many NULL 1994-10-05 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-10-26 00:00:00 NULL 38 2009-01-01
-22 Many Many NULL 1998-10-14 28405.0184 28405.0184 0.06 NULL 0.06 0.00 0.06 0.00 1998-10-12 00:00:00 NULL -4 2009-01-01
+21 Many Many NULL 1995-07-11 24640.0518 24640.0518 NULL NULL 0.00 0.00 0.00 0.00 1995-07-31 00:00:00 NULL 78 2009-01-01
22 Many Many NULL 1995-07-22 39353.82 39353.82 0.05 NULL 0.05 0.00 0.05 0.00 1995-07-19 00:00:00 NULL 45 2009-01-01
-23 Many Many NULL 1997-04-24 33946.3785 33946.3785 NULL NULL 0.00 0.00 0.00 0.00 1997-05-06 00:00:00 NULL 81 2009-01-01
-23 Many Many NULL 1994-10-13 0.0 0.0 0.00 NULL 0.00 0.00 0.00 0.00 1994-10-24 00:00:00 NULL 79 2009-12-31
+22 Many Many NULL 1998-10-14 28405.0184 28405.0184 0.06 NULL 0.06 0.00 0.06 0.00 1998-10-12 00:00:00 NULL -4 2009-01-01
23 Many Many NULL 1994-07-24 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-07-25 00:00:00 NULL 26 2009-01-01
-24 Many Many NULL 1996-04-04 20542.032 20542.032 0.04 NULL 0.04 0.00 0.04 0.00 1996-04-01 00:00:00 NULL 18 2009-12-31
+23 Many Many NULL 1994-10-13 0.0 0.0 0.00 NULL 0.00 0.00 0.00 0.00 1994-10-24 00:00:00 NULL 79 2009-12-31
+23 Many Many NULL 1997-04-24 33946.3785 33946.3785 NULL NULL 0.00 0.00 0.00 0.00 1997-05-06 00:00:00 NULL 81 2009-01-01
24 Many Many NULL 1996-02-26 31762.584 31762.584 0.00 0.00 0.00 0.00 0.00 0.00 1996-03-18 00:00:00 NULL 75 2009-01-01
-25 Many Many NULL 1998-04-15 43064.1575 43064.1575 0.07 NULL 0.07 0.00 0.07 0.00 1998-04-11 00:00:00 NULL -11 2009-01-01
+24 Many Many NULL 1996-04-04 20542.032 20542.032 0.04 NULL 0.04 0.00 0.04 0.00 1996-04-01 00:00:00 NULL 18 2009-12-31
25 Many Many NULL 1995-12-06 27263.995 27263.995 NULL NULL 0.00 0.00 0.00 0.00 1995-12-21 00:00:00 NULL -4 2009-01-01
-26 Many Many NULL 1996-11-09 39912.433600000004 39912.433600000004 0.04 NULL 0.04 0.00 0.04 0.00 1996-11-20 00:00:00 NULL 31 2009-01-01
-26 Many Many NULL 1995-04-25 0.0 0.0 0.03 NULL 0.03 0.00 0.03 0.00 1995-05-13 00:00:00 NULL 18 2009-01-01
-26 Many Many NULL 1994-10-21 0.0 0.0 0.08 NULL 0.08 0.00 0.08 0.00 1994-10-19 00:00:00 NULL 24 2009-01-01
+25 Many Many NULL 1998-04-15 43064.1575 43064.1575 0.07 NULL 0.07 0.00 0.07 0.00 1998-04-11 00:00:00 NULL -11 2009-01-01
26 Many Many NULL 1993-11-03 0.0 0.0 0.02 0.02 0.02 0.02 0.02 0.02 1993-11-04 00:00:00 NULL -44 2009-01-01
+26 Many Many NULL 1994-10-21 0.0 0.0 0.08 NULL 0.08 0.00 0.08 0.00 1994-10-19 00:00:00 NULL 24 2009-01-01
+26 Many Many NULL 1995-04-25 0.0 0.0 0.03 NULL 0.03 0.00 0.03 0.00 1995-05-13 00:00:00 NULL 18 2009-01-01
+26 Many Many NULL 1996-11-09 39912.433600000004 39912.433600000004 0.04 NULL 0.04 0.00 0.04 0.00 1996-11-20 00:00:00 NULL 31 2009-01-01
27 Many Many NULL 1994-01-26 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-01-23 00:00:00 NULL 62 2009-01-01
27 Many Many NULL 1998-06-29 45590.2425 45590.2425 NULL NULL 0.00 0.00 0.00 0.00 1998-06-29 00:00:00 NULL 4 2009-01-01
-28 Many Many NULL 1995-10-28 44866.219999999994 44866.219999999994 0.08 0.08 0.08 0.08 0.08 0.08 1995-10-26 00:00:00 NULL 60 2009-01-01
-28 Many Many NULL 1994-12-29 0.0 0.0 0.07 NULL 0.07 0.00 0.07 0.00 1995-01-16 00:00:00 NULL 83 2009-01-01
-28 Many Many NULL 1996-04-26 26349.6324 26349.6324 0.06 NULL 0.06 0.00 0.06 0.00 1996-05-16 00:00:00 NULL 47 2009-01-01
-28 Many Many NULL 1996-03-26 30855.6612 30855.6612 0.04 NULL 0.04 0.00 0.04 0.00 1996-04-20 00:00:00 NULL 12 2009-12-31
28 Many Many NULL 1993-12-19 0.0 0.0 0.00 0.00 0.00 0.00 0.00 0.00 1994-01-01 00:00:00 NULL -9 2009-01-01
+28 Many Many NULL 1994-12-29 0.0 0.0 0.07 NULL 0.07 0.00 0.07 0.00 1995-01-16 00:00:00 NULL 83 2009-01-01
+28 Many Many NULL 1995-10-28 44866.219999999994 44866.219999999994 0.08 0.08 0.08 0.08 0.08 0.08 1995-10-26 00:00:00 NULL 60 2009-01-01
28 Many Many NULL 1996-02-06 45975.3616 45975.3616 0.02 NULL 0.02 0.00 0.02 0.00 1996-02-28 00:00:00 NULL 66 2009-01-01
+28 Many Many NULL 1996-03-26 30855.6612 30855.6612 0.04 NULL 0.04 0.00 0.04 0.00 1996-04-20 00:00:00 NULL 12 2009-12-31
+28 Many Many NULL 1996-04-26 26349.6324 26349.6324 0.06 NULL 0.06 0.00 0.06 0.00 1996-05-16 00:00:00 NULL 47 2009-01-01
29 Many Many NULL 1997-01-30 39341.806 39341.806 NULL NULL 0.00 0.00 0.00 0.00 1997-01-27 00:00:00 NULL 0 2009-01-01
+3 Some Some Some 1994-06-11 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1994-06-15 00:00:00 NULL -42 2009-12-31
+3 Some Some Some 1998-06-02 5137.6143 5137.6143 0.07 NULL 0.07 0.00 0.07 0.00 1998-06-02 00:00:00 NULL 60 2009-01-01
+3 Some Some Some 1998-07-09 2778.921 2778.921 0.02 NULL 0.02 0.00 0.02 0.00 1998-07-21 00:00:00 NULL 46 2009-12-31
30 Many Many NULL 1994-06-08 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-06-22 00:00:00 NULL 24 2009-01-01
30 Many Many NULL 1996-01-15 29770.173 29770.173 NULL NULL 0.00 0.00 0.00 0.00 1996-01-18 00:00:00 NULL 35 2009-12-31
30 Many Many NULL 1998-08-16 44561.46 44561.46 0.06 NULL 0.06 0.00 0.06 0.00 1998-08-14 00:00:00 NULL 34 2009-12-31
-31 Many Many NULL 1994-02-24 0.0 0.0 0.08 0.08 0.08 0.08 0.08 0.08 1994-02-20 00:00:00 NULL -19 2009-01-01
31 Many Many NULL 1993-11-03 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1993-11-08 00:00:00 NULL -41 2009-01-01
-32 Many Many NULL 1995-08-19 63313.3312 63313.3312 0.00 NULL 0.00 0.00 0.00 0.00 1995-08-27 00:00:00 NULL -41 2009-01-01
+31 Many Many NULL 1994-02-24 0.0 0.0 0.08 0.08 0.08 0.08 0.08 0.08 1994-02-20 00:00:00 NULL -19 2009-01-01
32 Many Many NULL 1993-12-14 0.0 0.0 0.05 NULL 0.05 0.00 0.05 0.00 1993-12-28 00:00:00 NULL -7 2009-12-31
32 Many Many NULL 1994-08-29 0.0 0.0 0.06 NULL 0.06 0.00 0.06 0.00 1994-08-31 00:00:00 NULL 14 2009-01-01
-32 Many Many NULL 1996-10-07 44955.15839999999 44955.15839999999 0.05 NULL 0.05 0.00 0.05 0.00 1996-10-14 00:00:00 NULL -66 2009-12-31
+32 Many Many NULL 1995-08-19 63313.3312 63313.3312 0.00 NULL 0.00 0.00 0.00 0.00 1995-08-27 00:00:00 NULL -41 2009-01-01
32 Many Many NULL 1996-02-04 46146.7488 46146.7488 NULL NULL 0.00 0.00 0.00 0.00 1996-02-03 00:00:00 NULL -4 2009-01-01
+32 Many Many NULL 1996-10-07 44955.15839999999 44955.15839999999 0.05 NULL 0.05 0.00 0.05 0.00 1996-10-14 00:00:00 NULL -66 2009-12-31
33 Many Many NULL 1998-04-17 54174.12 54174.12 0.01 NULL 0.01 0.00 0.01 0.00 1998-04-15 00:00:00 NULL 26 2009-01-01
-34 Many Many NULL 1998-03-10 56487.763199999994 56487.763199999994 NULL NULL 0.00 0.00 0.00 0.00 1998-03-30 00:00:00 NULL -23 2009-01-01
-34 Many Many NULL 1996-01-27 63982.002400000005 63982.002400000005 NULL NULL 0.00 0.00 0.00 0.00 1996-01-27 00:00:00 NULL 21 2009-01-01
34 Many Many NULL 1995-11-13 60586.5448 60586.5448 0.06 NULL 0.06 0.00 0.06 0.00 1995-11-26 00:00:00 NULL -50 2009-01-01
+34 Many Many NULL 1996-01-27 63982.002400000005 63982.002400000005 NULL NULL 0.00 0.00 0.00 0.00 1996-01-27 00:00:00 NULL 21 2009-01-01
+34 Many Many NULL 1998-03-10 56487.763199999994 56487.763199999994 NULL NULL 0.00 0.00 0.00 0.00 1998-03-30 00:00:00 NULL -23 2009-01-01
35 Many Many NULL 1996-01-21 40475.225 40475.225 0.03 0.03 0.03 0.03 0.03 0.03 1996-01-22 00:00:00 NULL -32 2009-01-01
36 Many Many NULL 1996-04-17 41844.6756 41844.6756 0.06 0.06 0.06 0.06 0.06 0.06 1996-04-20 00:00:00 NULL 52 2009-01-01
-37 Many Many NULL 1994-02-18 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1994-02-21 00:00:00 NULL -23 2009-01-01
-37 Many Many NULL 1993-04-23 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1993-04-14 00:00:00 NULL 15 2009-12-31
37 Many Many NULL 1992-05-02 0.0 0.0 0.03 0.03 0.03 0.03 0.03 0.03 1992-05-02 00:00:00 NULL -13 2009-01-01
-38 Many Many NULL 1997-02-02 44694.46 44694.46 0.05 0.05 0.05 0.05 0.05 0.05 1997-02-02 00:00:00 NULL 19 2009-01-01
+37 Many Many NULL 1993-04-23 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1993-04-14 00:00:00 NULL 15 2009-12-31
+37 Many Many NULL 1994-02-18 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1994-02-21 00:00:00 NULL -23 2009-01-01
38 Many Many NULL 1996-02-16 68028.3144 68028.3144 NULL NULL 0.00 0.00 0.00 0.00 1996-02-18 00:00:00 NULL -6 2009-01-01
-39 Many Many NULL 1998-02-03 45146.01 45146.01 NULL NULL 0.00 0.00 0.00 0.00 1998-02-18 00:00:00 NULL -48 2009-01-01
+38 Many Many NULL 1997-02-02 44694.46 44694.46 0.05 0.05 0.05 0.05 0.05 0.05 1997-02-02 00:00:00 NULL 19 2009-01-01
39 Many Many NULL 1992-07-07 0.0 0.0 0.02 0.02 0.02 0.02 0.02 0.02 1992-07-28 00:00:00 NULL -21 2009-01-01
-40 Many Many NULL 1996-12-13 51224.736 51224.736 0.05 NULL 0.05 0.00 0.05 0.00 1997-01-01 00:00:00 NULL 71 2009-01-01
+39 Many Many NULL 1998-02-03 45146.01 45146.01 NULL NULL 0.00 0.00 0.00 0.00 1998-02-18 00:00:00 NULL -48 2009-01-01
+4 Some Some Some 1995-08-09 5990.4936 5990.4936 0.03 NULL 0.03 0.00 0.03 0.00 1995-09-03 00:00:00 NULL -28 2009-01-01
+4 Some Some Some 1997-04-27 5669.7732000000005 5669.7732000000005 0.04 NULL 0.04 0.00 0.04 0.00 1997-04-20 00:00:00 NULL 79 2009-01-01
40 Many Many NULL 1992-07-26 0.0 0.0 0.03 NULL 0.03 0.00 0.03 0.00 1992-08-15 00:00:00 NULL 14 2009-01-01
-41 Many Many NULL 1998-07-04 47989.6144 47989.6144 0.08 NULL 0.08 0.00 0.08 0.00 1998-07-06 00:00:00 NULL 9 2009-01-01
-41 Many Many NULL 1994-02-26 0.0 0.0 0.07 NULL 0.07 0.00 0.07 0.00 1994-03-18 00:00:00 NULL 17 2009-01-01
+40 Many Many NULL 1996-12-13 51224.736 51224.736 0.05 NULL 0.05 0.00 0.05 0.00 1997-01-01 00:00:00 NULL 71 2009-01-01
41 Many Many NULL 1993-11-14 0.0 0.0 0.00 0.00 0.00 0.00 0.00 0.00 1993-11-11 00:00:00 NULL -74 2009-01-01
+41 Many Many NULL 1994-02-26 0.0 0.0 0.07 NULL 0.07 0.00 0.07 0.00 1994-03-18 00:00:00 NULL 17 2009-01-01
+41 Many Many NULL 1998-07-04 47989.6144 47989.6144 0.08 NULL 0.08 0.00 0.08 0.00 1998-07-06 00:00:00 NULL 9 2009-01-01
42 Many Many NULL 1994-08-05 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-08-28 00:00:00 NULL 33 2009-12-31
42 Many Many NULL 1996-02-13 68289.9672 68289.9672 0.00 NULL 0.00 0.00 0.00 0.00 1996-02-23 00:00:00 NULL 33 2009-01-01
-43 Many Many NULL 1996-10-22 62727.3207 62727.3207 0.01 NULL 0.01 0.00 0.01 0.00 1996-10-26 00:00:00 NULL -19 2009-12-31
43 Many Many NULL 1992-07-15 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1992-08-02 00:00:00 NULL 27 2009-01-01
-44 Many Many NULL 1997-03-23 60781.124800000005 60781.124800000005 NULL NULL 0.00 0.00 0.00 0.00 1997-04-13 00:00:00 NULL 74 2009-12-31
-44 Many Many NULL 1996-10-04 80882.4192 80882.4192 0.02 NULL 0.02 0.00 0.02 0.00 1996-09-30 00:00:00 NULL -48 2009-01-01
+43 Many Many NULL 1996-10-22 62727.3207 62727.3207 0.01 NULL 0.01 0.00 0.01 0.00 1996-10-26 00:00:00 NULL -19 2009-12-31
44 Many Many NULL 1995-09-02 75106.658 75106.658 NULL NULL 0.00 0.00 0.00 0.00 1995-09-14 00:00:00 NULL 25 2009-01-01
+44 Many Many NULL 1996-10-04 80882.4192 80882.4192 0.02 NULL 0.02 0.00 0.02 0.00 1996-09-30 00:00:00 NULL -48 2009-01-01
44 Many Many NULL 1996-11-19 48941.692800000004 48941.692800000004 0.06 NULL 0.06 0.00 0.06 0.00 1996-12-12 00:00:00 NULL -3 2009-01-01
-45 Many Many NULL 1998-03-05 61489.35 61489.35 NULL NULL 0.00 0.00 0.00 0.00 1998-03-24 00:00:00 NULL 4 2009-01-01
+44 Many Many NULL 1997-03-23 60781.124800000005 60781.124800000005 NULL NULL 0.00 0.00 0.00 0.00 1997-04-13 00:00:00 NULL 74 2009-12-31
45 Many Many NULL 1994-02-07 0.0 0.0 0.00 NULL 0.00 0.00 0.00 0.00 1994-02-23 00:00:00 NULL 50 2009-01-01
+45 Many Many NULL 1998-03-05 61489.35 61489.35 NULL NULL 0.00 0.00 0.00 0.00 1998-03-24 00:00:00 NULL 4 2009-01-01
46 Many Many NULL 1996-01-20 73475.892 73475.892 0.07 NULL 0.07 0.00 0.07 0.00 1996-02-03 00:00:00 NULL -53 2009-01-01
+46 Many Many NULL 1996-10-01 77781.4092 77781.4092 NULL NULL 0.00 0.00 0.00 0.00 1996-10-26 00:00:00 NULL -54 2009-01-01
46 Many Many NULL 1998-07-01 56583.5144 56583.5144 0.05 NULL 0.05 0.00 0.05 0.00 1998-07-05 00:00:00 NULL 28 2009-01-01
46 Many Many NULL 1998-08-18 84565.5168 84565.5168 0.05 NULL 0.05 0.00 0.05 0.00 1998-08-29 00:00:00 NULL 52 2009-01-01
-46 Many Many NULL 1996-10-01 77781.4092 77781.4092 NULL NULL 0.00 0.00 0.00 0.00 1996-10-26 00:00:00 NULL -54 2009-01-01
48 Many Many NULL 1994-08-22 0.0 0.0 0.07 NULL 0.07 0.00 0.07 0.00 1994-09-08 00:00:00 NULL 28 2009-01-01
49 Many Many NULL 1993-11-14 0.0 0.0 0.00 0.00 0.00 0.00 0.00 0.00 1993-11-24 00:00:00 NULL -26 2009-12-31
+5 Some Some Some 1993-12-14 0.0 0.0 0.03 0.03 0.03 0.03 0.03 0.03 1993-12-23 00:00:00 NULL -2 2009-01-01
+5 Some Some Some 1996-02-15 6217.103999999999 6217.103999999999 0.02 NULL 0.02 0.00 0.02 0.00 1996-02-13 00:00:00 NULL -42 2009-01-01
+5 Some Some Some 1997-02-25 8116.96 8116.96 NULL NULL 0.00 0.00 0.00 0.00 1997-02-21 00:00:00 NULL 9 2009-01-01
50 Many Many NULL 1994-08-13 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-08-26 00:00:00 NULL -48 2009-12-31
+6 Some Some Some 1995-07-26 8793.2736 8793.2736 0.03 NULL 0.03 0.00 0.03 0.00 1995-07-25 00:00:00 NULL -60 2009-01-01
+6 Some Some Some 1998-11-04 9487.6152 9487.6152 0.06 NULL 0.06 0.00 0.06 0.00 1998-11-05 00:00:00 NULL 46 2009-12-31
+7 Some Some Some 1996-01-24 12613.136199999999 12613.136199999999 0.04 NULL 0.04 0.00 0.04 0.00 1996-01-29 00:00:00 NULL 38 2009-01-01
+8 Some Some Some 1994-01-17 0.0 0.0 0.08 0.08 0.08 0.08 0.08 0.08 1994-01-14 00:00:00 NULL -44 2009-01-01
+8 Some Some Some 1996-02-03 11978.640000000001 11978.640000000001 0.02 0.02 0.02 0.02 0.02 0.02 1996-01-31 00:00:00 NULL -34 2009-01-01
+9 Some Some Some 1996-02-11 10666.6272 10666.6272 0.08 0.08 0.08 0.08 0.08 0.08 1996-02-19 00:00:00 NULL -12 2009-01-01
+NULL Huge number NULL NULL NULL 0.0 0.0 NULL NULL NULL 0.00 NULL 0.00 NULL NULL NULL 2009-12-31
PREHOOK: query: EXPLAIN VECTORIZATION DETAIL
SELECT
 L_QUANTITY as Quantity,
@@ -466,7 +448,6 @@ SELECT
 IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11,
 IF(L_SUPPKEY % 500 > 100, DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12
 FROM lineitem_test
-ORDER BY Quantity
 PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL
 SELECT
@@ -508,7 +489,6 @@ SELECT
 IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11,
 IF(L_SUPPKEY % 500 > 100, DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12
 FROM lineitem_test
-ORDER BY Quantity
 POSTHOOK: type: QUERY
 Explain
 PLAN VECTORIZATION:
@@ -528,7 +508,7 @@ STAGE PLANS:
 Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE
 TableScan Vectorization:
 native: true
- vectorizationSchemaColumns: [0:l_orderkey:int, 1:l_partkey:int, 2:l_suppkey:int, 3:l_linenumber:int, 4:l_quantity:int, 5:l_extendedprice:double, 6:l_discount:double, 7:l_tax:decimal(10,2), 8:l_returnflag:char(1), 9:l_linestatus:char(1), 10:l_shipdate:date, 11:l_commitdate:date, 12:l_receiptdate:date, 13:l_shipinstruct:varchar(20), 14:l_shipmode:char(10), 15:l_comment:string, 16:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:l_orderkey:int, 1:l_partkey:int, 2:l_suppkey:int, 3:l_linenumber:int, 4:l_quantity:int, 5:l_extendedprice:double, 6:l_discount:double, 7:l_tax:decimal(10,2)/DECIMAL_64, 8:l_returnflag:char(1), 9:l_linestatus:char(1), 10:l_shipdate:date, 11:l_commitdate:date, 12:l_receiptdate:date, 13:l_shipinstruct:varchar(20), 14:l_shipmode:char(10), 15:l_comment:string, 16:ROW__ID:struct]
 Select Operator
 expressions: l_quantity (type: int), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE ('Huge number') END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE (null) END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN (null) ELSE (null) END (type: string), if((l_shipmode = 'SHIP '), date_add(l_shipdate, 10), date_add(l_shipdate, 5)) (type: date), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0) END (type: double), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0.0D) END (type: double), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), null, l_tax) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, null) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(12,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(12,2)), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(10,2)), if((l_partkey > 30), CAST( l_receiptdate AS TIMESTAMP), CAST( l_commitdate AS TIMESTAMP)) (type: timestamp), if((l_suppkey > 10000), datediff(l_receiptdate, l_commitdate), null) (type: int), if((l_suppkey > 10000), null, datediff(l_receiptdate, l_commitdate)) (type: int), if(((l_suppkey % 500) > 100), DATE'2009-01-01', DATE'2009-12-31') (type: date)
 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16
@@ -536,24 +516,24 @@ STAGE PLANS:
 className: VectorSelectOperator
 native: true
 projectedOutputColumnNums: [4, 22, 24, 25, 26, 27, 28, 30, 31, 32, 33, 34, 35, 38, 40, 43, 44]
- selectExpressions: IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 21:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 22:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val Somecol 21:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprStringScalarStringScalar(col 20:boolean, val Many, val Huge number)(children: LongColLessLongScalar(col 4:int, val 100) -> 20:boolean) -> 21:string) -> 22:string) -> 21:string) -> 22:string, IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 23:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 24:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val Somecol 23:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprColumnNull(col 20:boolean, col 21:string, null)(children: LongColLessLongScalar(col 4:int, val 100) -> 20:boolean, ConstantVectorExpression(val Many) -> 21:string) -> 23:string) -> 24:string) -> 23:string) -> 24:string, IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 23:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 25:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val Somecol 23:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprNullNull(null, null) -> 23:string) -> 25:string) -> 23:string) -> 25:string, IfExprLongColumnLongColumn(col 17:boolean, col 18:date, col 19:date)(children: StringGroupColEqualCharScalar(col 14:char(10), val SHIP) -> 17:boolean, VectorUDFDateAddColScalar(col 10:date, val 10) -> 18:date, VectorUDFDateAddColScalar(col 10:date, val 5) -> 19:date) -> 26:date, IfExprDoubleColumnLongScalar(col 17:boolean, col 28:double, val 0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 17:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 27:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 27:double) -> 28:double) -> 27:double, IfExprDoubleColumnDoubleScalar(col 17:boolean, col 29:double, val 0.0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 17:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 28:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 28:double) -> 29:double) -> 28:double, IfExprNullColumn(col 17:boolean, null, col 7)(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 17:boolean, col 7:decimal(10,2)) -> 30:decimal(10,2), IfExprColumnNull(col 18:boolean, col 7:decimal(10,2), null)(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 18:boolean, col 7:decimal(10,2)) -> 31:decimal(10,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 32:decimal(12,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 33:decimal(12,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 34:decimal(10,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 35:decimal(10,2), IfExprTimestampColumnColumn(col 19:boolean, col 36:timestampcol 37:timestamp)(children: LongColGreaterLongScalar(col 1:int, val 30) -> 19:boolean, CastDateToTimestamp(col 12:date) -> 36:timestamp, CastDateToTimestamp(col 11:date) -> 37:timestamp) -> 38:timestamp, IfExprColumnNull(col 19:boolean, col 39:int, null)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 19:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 39:int) -> 40:int, IfExprNullColumn(col 41:boolean, null, col 42)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 41:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 42:int) -> 43:int, IfExprLongScalarLongScalar(col 45:boolean, val 14245, val 14609)(children: LongColGreaterLongScalar(col 44:int, val 100)(children: LongColModuloLongScalar(col 2:int, val 500) -> 44:int) -> 45:boolean) -> 44:date
+ selectExpressions: IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 21:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 22:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val Somecol 21:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprStringScalarStringScalar(col 20:boolean, val Many, val Huge number)(children: LongColLessLongScalar(col 4:int, val 100) -> 20:boolean) -> 21:string) -> 22:string) -> 21:string) -> 22:string, IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 23:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 24:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val Somecol 23:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprColumnNull(col 20:boolean, col 21:string, null)(children: LongColLessLongScalar(col 4:int, val 100) -> 20:boolean, ConstantVectorExpression(val Many) -> 21:string) -> 23:string) -> 24:string) -> 23:string) -> 24:string, IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 23:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 25:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val Somecol 23:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprNullNull(null, null) -> 23:string) -> 25:string) -> 23:string) -> 25:string, IfExprLongColumnLongColumn(col 17:boolean, col 18:date, col 19:date)(children: StringGroupColEqualCharScalar(col 14:char(10), val SHIP) -> 17:boolean, VectorUDFDateAddColScalar(col 10:date, val 10) -> 18:date, VectorUDFDateAddColScalar(col 10:date, val 5) -> 19:date) -> 26:date, IfExprDoubleColumnLongScalar(col 17:boolean, col 28:double, val 0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 17:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 27:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 27:double) -> 28:double) -> 27:double, IfExprDoubleColumnDoubleScalar(col 17:boolean, col 29:double, val 0.0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 17:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 28:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 28:double) -> 29:double) -> 28:double, IfExprNullColumn(col 17:boolean, null, col 46)(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 17:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) -> 46:decimal(10,2)) -> 30:decimal(10,2), IfExprColumnNull(col 18:boolean, col 47:decimal(10,2), null)(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 18:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) -> 47:decimal(10,2)) -> 31:decimal(10,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 32:decimal(12,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 33:decimal(12,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 34:decimal(10,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 35:decimal(10,2), IfExprTimestampColumnColumn(col 19:boolean, col 36:timestampcol 37:timestamp)(children: LongColGreaterLongScalar(col 1:int, val 30) -> 19:boolean, CastDateToTimestamp(col 12:date) -> 36:timestamp, CastDateToTimestamp(col 11:date) -> 37:timestamp) -> 38:timestamp, IfExprColumnNull(col 19:boolean, col 39:int, null)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 19:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 39:int) -> 40:int, IfExprNullColumn(col 41:boolean, null, col 42)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 41:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 42:int) -> 43:int, IfExprLongScalarLongScalar(col 45:boolean, val 14245, val 14609)(children: LongColGreaterLongScalar(col 44:int, val 100)(children: LongColModuloLongScalar(col 2:int, val 500) -> 44:int) -> 45:boolean) -> 44:date
 Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE
- Reduce Output Operator
- key expressions: _col0 (type: int)
- sort order: +
- Reduce Sink Vectorization:
- className: VectorReduceSinkOperator
+ File Output Operator
+ compressed: false
+ File Sink Vectorization:
+ className: VectorFileSinkOperator
 native: false
- nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
 Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: date), _col5 (type: double), _col6 (type: double), _col7 (type: decimal(10,2)), _col8 (type: decimal(10,2)), _col9 (type: decimal(12,2)), _col10 (type: decimal(12,2)), _col11 (type: decimal(10,2)), _col12 (type: decimal(10,2)), _col13 (type: timestamp), _col14 (type: int), _col15 (type: int), _col16 (type: date)
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 Execution mode: vectorized
 Map Vectorization:
 enabled: true
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
 allNative: false
 usesVectorUDFAdaptor: true
@@ -561,25 +541,9 @@ STAGE PLANS:
 rowBatchContext:
 dataColumnCount: 16
 includeColumns: [1, 2, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14]
- dataColumns: l_orderkey:int, l_partkey:int, l_suppkey:int, l_linenumber:int, l_quantity:int, l_extendedprice:double, l_discount:double, l_tax:decimal(10,2), l_returnflag:char(1), l_linestatus:char(1), l_shipdate:date, l_commitdate:date, l_receiptdate:date, l_shipinstruct:varchar(20), l_shipmode:char(10), l_comment:string
+ dataColumns: l_orderkey:int, l_partkey:int, l_suppkey:int, l_linenumber:int, l_quantity:int, l_extendedprice:double, l_discount:double, l_tax:decimal(10,2)/DECIMAL_64, l_returnflag:char(1), l_linestatus:char(1), l_shipdate:date, l_commitdate:date, l_receiptdate:date, l_shipinstruct:varchar(20), l_shipmode:char(10), l_comment:string
 partitionColumnCount: 0
- scratchColumnTypeNames: [bigint, bigint, bigint, bigint, string, string, string, string, string, bigint, double, double, double, decimal(10,2), decimal(10,2), decimal(12,2), decimal(12,2), decimal(10,2), decimal(10,2), timestamp, timestamp, timestamp, bigint, bigint, bigint, bigint, bigint, bigint, bigint]
- Reduce Vectorization:
- enabled: false
- enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true
- enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
- Reduce Operator Tree:
- Select Operator
- expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: date), VALUE._col4 (type: double), VALUE._col5 (type: double), VALUE._col6 (type: decimal(10,2)), VALUE._col7 (type: decimal(10,2)), VALUE._col8 (type: decimal(12,2)), VALUE._col9 (type: decimal(12,2)), VALUE._col10 (type: decimal(10,2)), VALUE._col11 (type: decimal(10,2)), VALUE._col12 (type: timestamp), VALUE._col13 (type: int), VALUE._col14 (type: int), VALUE._col15 (type: date)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16
- Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ scratchColumnTypeNames: [bigint, bigint, bigint, bigint, string, string, string, string, string, bigint, double, double, double, decimal(10,2), decimal(10,2), decimal(12,2), decimal(12,2), decimal(10,2), decimal(10,2), timestamp, timestamp, timestamp, bigint, bigint, bigint, bigint, bigint, bigint, bigint, decimal(10,2), decimal(10,2)]
 Stage: Stage-0
 Fetch Operator
@@ -626,7 +590,6 @@ PREHOOK: query: SELECT
 IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11,
 IF(L_SUPPKEY % 500 > 100, DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12
 FROM lineitem_test
-ORDER BY Quantity
 PREHOOK: type: QUERY
 PREHOOK: Input: default@lineitem_test
 #### A masked pattern was here ####
@@ -669,112 +632,111 @@ POSTHOOK: query: SELECT
 IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11,
 IF(L_SUPPKEY % 500 > 100, DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12
 FROM lineitem_test
-ORDER BY Quantity
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@lineitem_test
 #### A masked pattern was here ####
 quantity quantity_description quantity_description_2 quantity_description_3 expected_date field_1 field_2 field_3 field_4 field_5 field_6 field_7 field_8 field_9 field_10 field_11 field_12
-NULL Huge number NULL NULL NULL 0.0 0.0 NULL NULL NULL 0.00 NULL 0.00 NULL NULL NULL 2009-12-31
-1 Single Single Single 1994-12-06 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-12-15 00:00:00 NULL 3 2009-01-01
 1 Single Single Single 1994-01-31 0.0 0.0 0.05 0.05 0.05 0.05 0.05 0.05 1994-01-28 00:00:00 NULL -36 2009-01-01
-2 Two Two Two 1995-08-12 2011.3912000000003 2011.3912000000003 NULL NULL 0.00 0.00 0.00 0.00 1995-08-23 00:00:00 NULL -45 2009-01-01
-2 Two Two Two 1993-12-09 0.0 0.0 0.06 NULL 0.06 0.00 0.06 0.00 1994-01-01 00:00:00 NULL -6 2009-01-01
-3 Some Some Some 1994-06-11 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1994-06-15 00:00:00 NULL -42 2009-12-31
-3 Some Some Some 1998-06-02 5137.6143 5137.6143 0.07 NULL 0.07 0.00 0.07 0.00 1998-06-02 00:00:00 NULL 60 2009-01-01
-3 Some Some Some 1998-07-09 2778.921 2778.921 0.02 NULL 0.02 0.00 0.02 0.00 1998-07-21 00:00:00 NULL 46 2009-12-31
-4 Some Some Some 1995-08-09 5990.4936 5990.4936 0.03 NULL 0.03 0.00 0.03 0.00 1995-09-03 00:00:00 NULL -28 2009-01-01
-4 Some Some Some 1997-04-27 5669.7732000000005 5669.7732000000005 0.04 NULL 0.04 0.00 0.04 0.00 1997-04-20 00:00:00 NULL 79 2009-01-01
-5 Some Some Some 1997-02-25 8116.96 8116.96 NULL NULL 0.00 0.00 0.00 0.00 1997-02-21 00:00:00 NULL 9 2009-01-01
-5 Some Some Some 1996-02-15 6217.103999999999 6217.103999999999 0.02 NULL 0.02 0.00 0.02 0.00 1996-02-13 00:00:00 NULL -42 2009-01-01
-5 Some Some Some 1993-12-14 0.0 0.0 0.03 0.03 0.03 0.03 0.03 0.03 1993-12-23 00:00:00 NULL -2 2009-01-01
-6 Some Some Some 1998-11-04 9487.6152 9487.6152 0.06 NULL 0.06 0.00 0.06 0.00 1998-11-05 00:00:00 NULL 46 2009-12-31
-6 Some Some Some 1995-07-26 8793.2736 8793.2736 0.03 NULL 0.03 0.00 0.03 0.00 1995-07-25 00:00:00 NULL -60 2009-01-01
-7 Some Some Some 1996-01-24 12613.136199999999 12613.136199999999 0.04 NULL 0.04 0.00 0.04 0.00 1996-01-29 00:00:00 NULL 38 2009-01-01
-8 Some Some Some 1994-01-17 0.0 0.0 0.08 0.08 0.08 0.08 0.08 0.08 1994-01-14 00:00:00 NULL -44 2009-01-01
-8 Some Some Some 1996-02-03 11978.640000000001 11978.640000000001 0.02 0.02 0.02 0.02 0.02 0.02 1996-01-31 00:00:00 NULL -34 2009-01-01
-9 Some Some Some 1996-02-11 10666.6272 10666.6272 0.08 0.08 0.08 0.08 0.08 0.08 1996-02-19 00:00:00 NULL -12 2009-01-01
+1 Single Single Single 1994-12-06 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-12-15 00:00:00
NULL 3 2009-01-01
 11 Many Many NULL 1994-03-22 0.0 0.0 0.05 NULL 0.05 0.00 0.05 0.00 1994-03-27 00:00:00 NULL 10 2009-01-01
 12 Many Many NULL 1996-05-12 12655.998 12655.998 0.03 0.03 0.03 0.03 0.03 0.03 1996-06-03 00:00:00 NULL 82 2009-01-01
 12 Many Many NULL 1997-02-01 12156.034800000001 12156.034800000001 0.05 NULL 0.05 0.00 0.05 0.00 1997-02-22 00:00:00 NULL 1 2009-01-01
+13 Many Many NULL 1993-04-06 0.0 0.0 0.02 NULL 0.02 0.00 0.02 0.00 1993-04-08 00:00:00 NULL 4 2009-01-01
 13 Many Many NULL 1994-03-08 0.0 0.0 0.06 NULL 0.06 0.00 0.06 0.00 1994-03-26 00:00:00 NULL 41 2009-01-01
 13 Many Many NULL 1998-10-28 17554.68 17554.68 0.07 NULL 0.07 0.00 0.07 0.00 1998-11-06 00:00:00 NULL 53 2009-01-01
-13 Many Many NULL 1993-04-06 0.0 0.0 0.02 NULL 0.02 0.00 0.02 0.00 1993-04-08 00:00:00 NULL 4 2009-01-01
 14 Many Many NULL 1995-01-04 0.0 0.0 0.02 NULL 0.02 0.00 0.02 0.00 1995-01-27 00:00:00 NULL 66 2009-01-01
 15 Many Many NULL 1994-11-05 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1994-11-20 00:00:00 NULL 81 2009-12-31
-17 Many Many NULL 1996-03-18 20321.500799999998 20321.500799999998 NULL NULL 0.00 0.00 0.00 0.00 1996-03-22 00:00:00 NULL 39 2009-01-01
 17 Many Many NULL 1994-07-07 0.0 0.0 0.00 0.00 0.00 0.00 0.00 0.00 1994-07-03 00:00:00 NULL -4 2009-01-01
+17 Many Many NULL 1996-03-18 20321.500799999998 20321.500799999998 NULL NULL 0.00 0.00 0.00 0.00 1996-03-22 00:00:00 NULL 39 2009-01-01
 19 Many Many NULL 1993-05-19 0.0 0.0 0.08 0.08 0.08 0.08 0.08 0.08 1993-05-25 00:00:00 NULL 81 2009-01-01
 19 Many Many NULL 1994-02-05 0.0 0.0 0.03 0.03 0.03 0.03 0.03 0.03 1994-02-06 00:00:00 NULL -11 2009-01-01
+2 Two Two Two 1993-12-09 0.0 0.0 0.06 NULL 0.06 0.00 0.06 0.00 1994-01-01 00:00:00 NULL -6 2009-01-01
+2 Two Two Two 1995-08-12 2011.3912000000003 2011.3912000000003 NULL NULL 0.00 0.00 0.00 0.00 1995-08-23 00:00:00 NULL -45 2009-01-01
 20 Many Many NULL 1998-07-02 32042.592 32042.592 0.01 NULL 0.01 0.00 0.01 0.00 1998-07-02 00:00:00 NULL 40 2009-01-01
-21 Many Many NULL 1995-07-11 24640.0518 24640.0518 NULL NULL 0.00 0.00 0.00 0.00 1995-07-31 00:00:00 NULL 78 2009-01-01
 21 Many Many NULL 1994-10-05 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-10-26 00:00:00 NULL 38 2009-01-01
-22 Many Many NULL 1998-10-14 28405.0184 28405.0184 0.06 NULL 0.06 0.00 0.06 0.00 1998-10-12 00:00:00 NULL -4 2009-01-01
+21 Many Many NULL 1995-07-11 24640.0518 24640.0518 NULL NULL 0.00 0.00 0.00 0.00 1995-07-31 00:00:00 NULL 78 2009-01-01
 22 Many Many NULL 1995-07-22 39353.82 39353.82 0.05 NULL 0.05 0.00 0.05 0.00 1995-07-19 00:00:00 NULL 45 2009-01-01
-23 Many Many NULL 1997-04-24 33946.3785 33946.3785 NULL NULL 0.00 0.00 0.00 0.00 1997-05-06 00:00:00 NULL 81 2009-01-01
-23 Many Many NULL 1994-10-13 0.0 0.0 0.00 NULL 0.00 0.00 0.00 0.00 1994-10-24 00:00:00 NULL 79 2009-12-31
+22 Many Many NULL 1998-10-14 28405.0184 28405.0184 0.06 NULL 0.06 0.00 0.06 0.00 1998-10-12 00:00:00 NULL -4 2009-01-01
 23 Many Many NULL 1994-07-24 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-07-25 00:00:00 NULL 26 2009-01-01
-24 Many Many NULL 1996-04-04 20542.032 20542.032 0.04 NULL 0.04 0.00 0.04 0.00 1996-04-01 00:00:00 NULL 18 2009-12-31
+23 Many Many NULL 1994-10-13 0.0 0.0 0.00 NULL 0.00 0.00 0.00 0.00 1994-10-24 00:00:00 NULL 79 2009-12-31
+23 Many Many NULL 1997-04-24 33946.3785 33946.3785 NULL NULL 0.00 0.00 0.00 0.00 1997-05-06 00:00:00 NULL 81 2009-01-01
 24 Many Many NULL 1996-02-26 31762.584 31762.584 0.00 0.00 0.00 0.00 0.00 0.00 1996-03-18 00:00:00 NULL 75 2009-01-01
-25 Many Many NULL 1998-04-15 43064.1575 43064.1575 0.07 NULL 0.07 0.00 0.07 0.00 1998-04-11 00:00:00 NULL -11 2009-01-01
+24 Many Many NULL 1996-04-04 20542.032 20542.032 0.04 NULL 0.04 0.00 0.04 0.00 1996-04-01 00:00:00 NULL 18 2009-12-31
 25 Many Many NULL 1995-12-06 27263.995 27263.995 NULL NULL 0.00 0.00 0.00 0.00 1995-12-21 00:00:00 NULL -4 2009-01-01
-26 Many Many NULL 1996-11-09 39912.433600000004 39912.433600000004 0.04 NULL 0.04 0.00 0.04 0.00 1996-11-20 00:00:00 NULL 31 2009-01-01
-26 Many Many NULL 1995-04-25 0.0 0.0 0.03 NULL 0.03 0.00 0.03 0.00 1995-05-13 00:00:00 NULL 18 2009-01-01
-26 Many Many NULL 1994-10-21 0.0 0.0 0.08 NULL 0.08 0.00 0.08 0.00 1994-10-19 00:00:00 NULL 24 2009-01-01
+25 Many Many NULL 1998-04-15 43064.1575 43064.1575 0.07 NULL 0.07 0.00 0.07 0.00 1998-04-11 00:00:00 NULL -11 2009-01-01
 26 Many Many NULL 1993-11-03 0.0 0.0 0.02 0.02 0.02 0.02 0.02 0.02 1993-11-04 00:00:00 NULL -44 2009-01-01
+26 Many Many NULL 1994-10-21 0.0 0.0 0.08 NULL 0.08 0.00 0.08 0.00 1994-10-19 00:00:00 NULL 24 2009-01-01
+26 Many Many NULL 1995-04-25 0.0 0.0 0.03 NULL 0.03 0.00 0.03 0.00 1995-05-13 00:00:00 NULL 18 2009-01-01
+26 Many Many NULL 1996-11-09 39912.433600000004 39912.433600000004 0.04 NULL 0.04 0.00 0.04 0.00 1996-11-20 00:00:00 NULL 31 2009-01-01
 27 Many Many NULL 1994-01-26 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-01-23 00:00:00 NULL 62 2009-01-01
 27 Many Many NULL 1998-06-29 45590.2425 45590.2425 NULL NULL 0.00 0.00 0.00 0.00 1998-06-29 00:00:00 NULL 4 2009-01-01
-28 Many Many NULL 1995-10-28 44866.219999999994 44866.219999999994 0.08 0.08 0.08 0.08 0.08 0.08 1995-10-26 00:00:00 NULL 60 2009-01-01
-28 Many Many NULL 1994-12-29 0.0 0.0 0.07 NULL 0.07 0.00 0.07 0.00 1995-01-16 00:00:00 NULL 83 2009-01-01
-28 Many Many NULL 1996-04-26 26349.6324 26349.6324 0.06 NULL 0.06 0.00 0.06 0.00 1996-05-16 00:00:00 NULL 47 2009-01-01
-28 Many Many NULL 1996-03-26 30855.6612 30855.6612 0.04 NULL 0.04 0.00 0.04 0.00 1996-04-20 00:00:00 NULL 12 2009-12-31
 28 Many Many NULL 1993-12-19 0.0 0.0 0.00 0.00 0.00 0.00 0.00 0.00 1994-01-01 00:00:00 NULL -9 2009-01-01
+28 Many Many NULL 1994-12-29 0.0 0.0 0.07 NULL 0.07 0.00 0.07 0.00 1995-01-16 00:00:00 NULL 83 2009-01-01
+28 Many Many NULL 1995-10-28 44866.219999999994 44866.219999999994 0.08 0.08 0.08 0.08 0.08 0.08 1995-10-26 00:00:00 NULL 60 2009-01-01
 28 Many Many NULL 1996-02-06 45975.3616 45975.3616 0.02 NULL 0.02 0.00 0.02 0.00 1996-02-28 00:00:00 NULL 66 2009-01-01
+28 Many Many NULL 1996-03-26 30855.6612 30855.6612 0.04 NULL 0.04 0.00 0.04 0.00 1996-04-20 00:00:00 NULL 12 2009-12-31
+28 Many Many NULL 1996-04-26 26349.6324 26349.6324 0.06 NULL 0.06 0.00 0.06 0.00 1996-05-16 00:00:00 NULL 47 2009-01-01
 29 Many Many NULL 1997-01-30 39341.806 39341.806 NULL NULL 0.00 0.00 0.00 0.00 1997-01-27 00:00:00 NULL 0 2009-01-01
+3 Some Some Some 1994-06-11 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1994-06-15 00:00:00 NULL -42 2009-12-31
+3 Some Some Some 1998-06-02 5137.6143 5137.6143 0.07 NULL 0.07 0.00 0.07 0.00 1998-06-02 00:00:00 NULL 60 2009-01-01
+3 Some Some Some 1998-07-09 2778.921 2778.921 0.02 NULL 0.02 0.00 0.02 0.00 1998-07-21 00:00:00 NULL 46 2009-12-31
 30 Many Many NULL 1994-06-08 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-06-22 00:00:00 NULL 24 2009-01-01
 30 Many Many NULL 1996-01-15 29770.173 29770.173 NULL NULL 0.00 0.00 0.00 0.00 1996-01-18 00:00:00 NULL 35 2009-12-31
 30 Many Many NULL 1998-08-16 44561.46 44561.46 0.06 NULL 0.06 0.00 0.06 0.00 1998-08-14 00:00:00 NULL 34 2009-12-31
-31 Many Many NULL 1994-02-24 0.0 0.0 0.08 0.08 0.08 0.08 0.08 0.08 1994-02-20 00:00:00 NULL -19 2009-01-01
 31 Many Many NULL 1993-11-03 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1993-11-08 00:00:00 NULL -41 2009-01-01
-32 Many Many NULL 1995-08-19 63313.3312 63313.3312 0.00 NULL 0.00 0.00 0.00 0.00 1995-08-27 00:00:00 NULL -41 2009-01-01
+31 Many Many NULL 1994-02-24 0.0 0.0 0.08 0.08 0.08 0.08 0.08 0.08 1994-02-20 00:00:00 NULL -19 2009-01-01
 32 Many Many NULL 1993-12-14 0.0 0.0 0.05 NULL 0.05 0.00 0.05 0.00 1993-12-28 00:00:00 NULL -7 2009-12-31
 32 Many Many NULL 1994-08-29 0.0 0.0 0.06 NULL 0.06 0.00 0.06 0.00 1994-08-31 00:00:00 NULL 14 2009-01-01
-32 Many Many NULL 1996-10-07 44955.15839999999 44955.15839999999 0.05 NULL 0.05 0.00 0.05 0.00 1996-10-14 00:00:00 NULL -66 2009-12-31
+32 Many Many NULL 1995-08-19 63313.3312 63313.3312 0.00 NULL 0.00 0.00 0.00 0.00 1995-08-27 00:00:00 NULL -41 2009-01-01
 32 Many Many NULL 1996-02-04 46146.7488 46146.7488 NULL NULL 0.00 0.00 0.00 0.00 1996-02-03 00:00:00 NULL -4 2009-01-01
+32 Many Many NULL 1996-10-07 44955.15839999999 44955.15839999999 0.05 NULL 0.05 0.00 0.05 0.00 1996-10-14 00:00:00 NULL -66 2009-12-31
 33 Many Many NULL 1998-04-17 54174.12 54174.12 0.01 NULL 0.01 0.00 0.01 0.00 1998-04-15 00:00:00 NULL 26 2009-01-01
-34 Many Many NULL 1998-03-10 56487.763199999994 56487.763199999994 NULL NULL 0.00 0.00 0.00 0.00 1998-03-30 00:00:00 NULL -23 2009-01-01
-34 Many Many NULL 1996-01-27 63982.002400000005 63982.002400000005 NULL NULL 0.00 0.00 0.00 0.00 1996-01-27 00:00:00 NULL 21 2009-01-01
 34 Many Many NULL 1995-11-13 60586.5448 60586.5448 0.06 NULL 0.06 0.00 0.06 0.00 1995-11-26 00:00:00 NULL -50 2009-01-01
+34 Many Many NULL 1996-01-27 63982.002400000005 63982.002400000005 NULL NULL 0.00 0.00 0.00 0.00 1996-01-27 00:00:00 NULL 21 2009-01-01
+34 Many Many NULL 1998-03-10 56487.763199999994 56487.763199999994 NULL NULL 0.00 0.00 0.00 0.00 1998-03-30 00:00:00 NULL -23 2009-01-01
 35 Many Many NULL 1996-01-21 40475.225 40475.225 0.03 0.03 0.03 0.03 0.03 0.03 1996-01-22 00:00:00 NULL -32 2009-01-01
 36 Many Many NULL 1996-04-17 41844.6756 41844.6756 0.06 0.06 0.06 0.06 0.06 0.06 1996-04-20 00:00:00 NULL 52 2009-01-01
-37 Many Many NULL 1994-02-18 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1994-02-21 00:00:00 NULL -23 2009-01-01
-37 Many Many NULL 1993-04-23 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1993-04-14 00:00:00 NULL 15 2009-12-31
 37 Many Many NULL 1992-05-02 0.0 0.0 0.03 0.03 0.03 0.03 0.03 0.03 1992-05-02 00:00:00 NULL -13 2009-01-01
-38 Many Many NULL 1997-02-02 44694.46 44694.46 0.05 0.05 0.05 0.05 0.05 0.05 1997-02-02 00:00:00 NULL 19 2009-01-01
+37 Many Many NULL 1993-04-23 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1993-04-14 00:00:00 NULL 15 2009-12-31
+37 Many Many NULL 1994-02-18 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1994-02-21 00:00:00 NULL -23 2009-01-01
 38 Many Many NULL 1996-02-16 68028.3144 68028.3144 NULL NULL 0.00 0.00 0.00 0.00 1996-02-18 00:00:00 NULL -6 2009-01-01
-39 Many Many NULL 1998-02-03 45146.01 45146.01 NULL NULL 0.00 0.00 0.00 0.00 1998-02-18 00:00:00 NULL -48 2009-01-01
+38 Many Many NULL 1997-02-02 44694.46 44694.46 0.05 0.05 0.05 0.05 0.05 0.05 1997-02-02 00:00:00 NULL 19 2009-01-01
 39 Many Many NULL 1992-07-07 0.0 0.0 0.02 0.02 0.02 0.02 0.02 0.02 1992-07-28 00:00:00 NULL -21 2009-01-01
-40 Many Many NULL 1996-12-13 51224.736 51224.736 0.05 NULL 0.05 0.00 0.05 0.00 1997-01-01 00:00:00 NULL 71 2009-01-01
+39 Many Many NULL 1998-02-03 45146.01 45146.01 NULL NULL 0.00 0.00 0.00 0.00 1998-02-18 00:00:00 NULL -48 2009-01-01
+4 Some Some Some 1995-08-09 5990.4936 5990.4936 0.03 NULL 0.03 0.00 0.03 0.00 1995-09-03 00:00:00 NULL -28 2009-01-01
+4 Some Some Some 1997-04-27 5669.7732000000005 5669.7732000000005 0.04 NULL 0.04 0.00 0.04 0.00 1997-04-20 00:00:00 NULL 79 2009-01-01
 40 Many Many NULL 1992-07-26 0.0 0.0 0.03 NULL 0.03 0.00 0.03 0.00 1992-08-15 00:00:00 NULL 14 2009-01-01
-41 Many Many NULL 1998-07-04 47989.6144 47989.6144 0.08 NULL 0.08 0.00 0.08 0.00 1998-07-06 00:00:00 NULL 9 2009-01-01
-41 Many Many NULL 1994-02-26 0.0 0.0 0.07 NULL 0.07 0.00 0.07 0.00 1994-03-18 00:00:00 NULL 17 2009-01-01
+40 Many Many NULL 1996-12-13 51224.736 51224.736 0.05 NULL 0.05 0.00 0.05 0.00 1997-01-01 00:00:00 NULL 71 2009-01-01
 41 Many Many NULL 1993-11-14 0.0 0.0 0.00 0.00 0.00 0.00 0.00 0.00 1993-11-11 00:00:00 NULL -74 2009-01-01
+41 Many Many NULL 1994-02-26 0.0 0.0 0.07 NULL 0.07 0.00 0.07 0.00 1994-03-18 00:00:00 NULL 17 2009-01-01
+41 Many Many NULL 1998-07-04 47989.6144 47989.6144 0.08 NULL 0.08 0.00 0.08 0.00 1998-07-06 00:00:00 NULL 9 2009-01-01
 42 Many Many NULL 1994-08-05 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-08-28 00:00:00 NULL 33 2009-12-31
 42 Many Many NULL 1996-02-13 68289.9672 68289.9672 0.00 NULL 0.00 0.00 0.00 0.00 1996-02-23 00:00:00 NULL 33 2009-01-01
-43 Many Many NULL 1996-10-22 62727.3207 62727.3207 0.01 NULL 0.01 0.00 0.01 0.00 1996-10-26 00:00:00 NULL -19 2009-12-31
 43 Many Many NULL 1992-07-15 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1992-08-02 00:00:00 NULL 27 2009-01-01
-44 Many Many NULL 1997-03-23 60781.124800000005 60781.124800000005 NULL NULL 0.00 0.00 0.00 0.00 1997-04-13 00:00:00 NULL 74 2009-12-31
-44 Many Many NULL 1996-10-04 80882.4192 80882.4192 0.02 NULL 0.02 0.00 0.02 0.00 1996-09-30 00:00:00 NULL -48 2009-01-01
+43 Many Many NULL 1996-10-22 62727.3207 62727.3207 0.01 NULL 0.01 0.00 0.01 0.00 1996-10-26 00:00:00 NULL -19 2009-12-31
 44 Many Many NULL 1995-09-02 75106.658 75106.658 NULL NULL 0.00 0.00 0.00 0.00 1995-09-14 00:00:00 NULL 25 2009-01-01
+44 Many Many NULL 1996-10-04 80882.4192 80882.4192 0.02 NULL 0.02 0.00 0.02 0.00 1996-09-30 00:00:00 NULL -48 2009-01-01
 44 Many Many NULL 1996-11-19 48941.692800000004 48941.692800000004 0.06 NULL 0.06 0.00 0.06 0.00 1996-12-12 00:00:00 NULL -3 2009-01-01
-45 Many Many NULL 1998-03-05 61489.35 61489.35 NULL NULL 0.00 0.00 0.00 0.00 1998-03-24 00:00:00 NULL 4 2009-01-01
+44 Many Many NULL 1997-03-23 60781.124800000005 60781.124800000005 NULL NULL 0.00 0.00 0.00 0.00 1997-04-13 00:00:00 NULL 74 2009-12-31
 45 Many Many NULL 1994-02-07 0.0 0.0 0.00 NULL 0.00 0.00 0.00 0.00 1994-02-23 00:00:00 NULL 50 2009-01-01
+45 Many Many NULL 1998-03-05 61489.35 61489.35 NULL NULL 0.00 0.00 0.00 0.00 1998-03-24 00:00:00 NULL 4 2009-01-01
 46 Many Many NULL 1996-01-20 73475.892 73475.892 0.07 NULL 0.07 0.00 0.07 0.00 1996-02-03 00:00:00 NULL -53 2009-01-01
+46 Many Many NULL 1996-10-01 77781.4092 77781.4092 NULL NULL 0.00 0.00 0.00 0.00 1996-10-26 00:00:00 NULL -54 2009-01-01
 46 Many Many NULL 1998-07-01 56583.5144 56583.5144 0.05 NULL 0.05 0.00 0.05 0.00 1998-07-05 00:00:00 NULL 28 2009-01-01
 46 Many Many NULL 1998-08-18 84565.5168 84565.5168 0.05 NULL 0.05 0.00 0.05 0.00 1998-08-29 00:00:00 NULL 52 2009-01-01
-46 Many Many NULL 1996-10-01 77781.4092 77781.4092 NULL NULL 0.00 0.00 0.00 0.00 1996-10-26 00:00:00 NULL -54 2009-01-01
 48 Many Many NULL 1994-08-22 0.0 0.0 0.07 NULL 0.07 0.00 0.07 0.00 1994-09-08 00:00:00 NULL 28 2009-01-01
 49 Many Many NULL 1993-11-14 0.0 0.0 0.00 0.00 0.00 0.00 0.00 0.00 1993-11-24 00:00:00 NULL -26 2009-12-31
+5 Some Some Some 1993-12-14 0.0 0.0 0.03 0.03 0.03 0.03 0.03 0.03 1993-12-23 00:00:00 NULL -2 2009-01-01
+5 Some Some Some 1996-02-15 6217.103999999999 6217.103999999999 0.02 NULL 0.02 0.00 0.02 0.00 1996-02-13 00:00:00 NULL -42 2009-01-01
+5 Some Some Some 1997-02-25 8116.96 8116.96 NULL NULL 0.00 0.00 0.00 0.00 1997-02-21 00:00:00 NULL 9 2009-01-01
 50 Many Many NULL 1994-08-13 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-08-26 00:00:00 NULL -48 2009-12-31
+6 Some Some Some 1995-07-26 8793.2736 8793.2736 0.03 NULL 0.03 0.00 0.03 0.00 1995-07-25 00:00:00 NULL -60 2009-01-01
+6 Some Some Some 1998-11-04 9487.6152 9487.6152 0.06 NULL 0.06 0.00 0.06 0.00 1998-11-05 00:00:00 NULL 46 2009-12-31
+7 Some Some Some 1996-01-24 12613.136199999999 12613.136199999999 0.04 NULL 0.04 0.00 0.04 0.00 1996-01-29 00:00:00 NULL 38 2009-01-01
+8 Some Some Some 1994-01-17 0.0 0.0 0.08 0.08 0.08 0.08 0.08 0.08 1994-01-14 00:00:00 NULL -44 2009-01-01
+8 Some Some Some 1996-02-03 11978.640000000001 11978.640000000001 0.02 0.02 0.02 0.02 0.02 0.02 1996-01-31 00:00:00 NULL -34 2009-01-01
+9 Some Some Some 1996-02-11 10666.6272 10666.6272 0.08 0.08 0.08 0.08 0.08 0.08 1996-02-19 00:00:00 NULL -12 2009-01-01
+NULL Huge number NULL NULL NULL 0.0 0.0 NULL NULL NULL 0.00 NULL 0.00 NULL NULL NULL 2009-12-31
 PREHOOK: query: EXPLAIN VECTORIZATION DETAIL
 SELECT
 L_QUANTITY as Quantity,
@@ -815,7 +777,6 @@ SELECT
 IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11,
 IF(L_SUPPKEY % 500 > 100, DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12
 FROM lineitem_test
-ORDER BY Quantity
 PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL
 SELECT
@@ -857,7 +818,6 @@ SELECT
 IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11,
 IF(L_SUPPKEY % 500 > 100, DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12
 FROM lineitem_test
-ORDER BY Quantity
 POSTHOOK: type: QUERY
 Explain
 PLAN VECTORIZATION:
@@ -877,7 +837,7 @@ STAGE PLANS:
 Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE
 TableScan Vectorization:
 native: true
- vectorizationSchemaColumns: [0:l_orderkey:int, 1:l_partkey:int, 2:l_suppkey:int, 3:l_linenumber:int, 4:l_quantity:int, 5:l_extendedprice:double, 6:l_discount:double, 7:l_tax:decimal(10,2), 8:l_returnflag:char(1), 9:l_linestatus:char(1), 10:l_shipdate:date, 11:l_commitdate:date, 12:l_receiptdate:date, 13:l_shipinstruct:varchar(20), 14:l_shipmode:char(10), 15:l_comment:string, 16:ROW__ID:struct]
+ vectorizationSchemaColumns: [0:l_orderkey:int, 1:l_partkey:int, 2:l_suppkey:int, 3:l_linenumber:int, 4:l_quantity:int, 5:l_extendedprice:double, 6:l_discount:double, 7:l_tax:decimal(10,2)/DECIMAL_64, 8:l_returnflag:char(1), 9:l_linestatus:char(1), 10:l_shipdate:date, 11:l_commitdate:date, 12:l_receiptdate:date, 13:l_shipinstruct:varchar(20), 14:l_shipmode:char(10), 15:l_comment:string, 16:ROW__ID:struct]
 Select Operator
 expressions: l_quantity (type: int), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE ('Huge number') END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE (null) END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN (null) ELSE (null) END (type: string), if((l_shipmode = 'SHIP '), date_add(l_shipdate, 10), date_add(l_shipdate, 5)) (type: date), CASE WHEN
((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0) END (type: double), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0.0D) END (type: double), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), null, l_tax) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, null) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(12,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(12,2)), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(10,2)), if((l_partkey > 30), CAST( l_receiptdate AS TIMESTAMP), CAST( l_commitdate AS TIMESTAMP)) (type: timestamp), if((l_suppkey > 10000), datediff(l_receiptdate, l_commitdate), null) (type: int), if((l_suppkey > 10000), null, datediff(l_receiptdate, l_commitdate)) (type: int), if(((l_suppkey % 500) > 100), DATE'2009-01-01', DATE'2009-12-31') (type: date)
 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16
@@ -885,24 +845,24 @@ STAGE PLANS:
 className: VectorSelectOperator
 native: true
 projectedOutputColumnNums: [4, 27, 38, 48, 52, 54, 60, 63, 65, 67, 68, 69, 70, 73, 76, 79, 80]
- selectExpressions: IfExprColumnCondExpr(col 17:boolean, col 18:stringcol 26:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, ConstantVectorExpression(val Single) -> 18:string, IfExprColumnCondExpr(col 19:boolean, col 20:stringcol 25:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 19:boolean, ConstantVectorExpression(val Two) -> 20:string, IfExprColumnCondExpr(col 21:boolean, col 22:stringcol 24:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 21:boolean, ConstantVectorExpression(val Some) -> 22:string, IfExprStringScalarStringScalar(col 23:boolean, val Many, val Huge number)(children: LongColLessLongScalar(col 4:int, val 100) -> 23:boolean) -> 24:string) -> 25:string) -> 26:string) -> 27:string, IfExprColumnCondExpr(col 23:boolean, col 28:stringcol 37:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 23:boolean, ConstantVectorExpression(val Single) -> 28:string, IfExprColumnCondExpr(col 29:boolean, col 30:stringcol 36:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 29:boolean, ConstantVectorExpression(val Two) -> 30:string, IfExprColumnCondExpr(col 31:boolean, col 32:stringcol 35:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 31:boolean, ConstantVectorExpression(val Some) -> 32:string, IfExprColumnNull(col 33:boolean, col 34:string, null)(children: LongColLessLongScalar(col 4:int, val 100) -> 33:boolean, ConstantVectorExpression(val Many) -> 34:string) -> 35:string) -> 36:string) -> 37:string) -> 38:string, IfExprColumnCondExpr(col 39:boolean, col 40:stringcol 47:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 39:boolean, ConstantVectorExpression(val Single) -> 40:string, IfExprColumnCondExpr(col 41:boolean, col 42:stringcol 46:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 41:boolean, ConstantVectorExpression(val Two) -> 42:string, IfExprColumnCondExpr(col 43:boolean, col 44:stringcol 45:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 43:boolean, ConstantVectorExpression(val Some) -> 44:string, IfExprNullNull(null, null) -> 45:string) -> 46:string) -> 47:string) -> 48:string, IfExprCondExprCondExpr(col 49:boolean, col 50:datecol 51:date)(children: StringGroupColEqualCharScalar(col 14:char(10), val SHIP) -> 49:boolean, VectorUDFDateAddColScalar(col 10:date, val 10) -> 50:date, VectorUDFDateAddColScalar(col 10:date, val 5) -> 51:date) -> 52:date, IfExprDoubleColumnLongScalar(col 57:boolean, col 58:double, val 0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 57:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 54:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 54:double) -> 58:double) -> 54:double, IfExprCondExprColumn(col 57:boolean, col 59:double, col 58:double)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 57:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 58:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 58:double) -> 59:double, ConstantVectorExpression(val 0.0) -> 58:double) -> 60:double, IfExprNullColumn(col 62:boolean, null, col 7)(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 62:boolean, col 7:decimal(10,2)) -> 63:decimal(10,2), IfExprColumnNull(col 64:boolean, col 7:decimal(10,2), null)(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 64:boolean, col 7:decimal(10,2)) -> 65:decimal(10,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 67:decimal(12,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 68:decimal(12,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 69:decimal(10,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 70:decimal(10,2), IfExprCondExprCondExpr(col 66:boolean, col 71:timestampcol 72:timestamp)(children: LongColGreaterLongScalar(col 1:int, val 30) -> 66:boolean, CastDateToTimestamp(col 12:date) -> 71:timestamp, CastDateToTimestamp(col 11:date) -> 72:timestamp) -> 73:timestamp, IfExprCondExprNull(col 74:boolean, col 75:int, null)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 74:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 75:int) -> 76:int, IfExprNullCondExpr(col 77:boolean, null, col 78:int)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 77:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 78:int) -> 79:int, IfExprLongScalarLongScalar(col 81:boolean, val 14245, val 14609)(children: LongColGreaterLongScalar(col 80:int, val 100)(children: LongColModuloLongScalar(col 2:int, val 500) -> 80:int) -> 81:boolean) -> 80:date
+ selectExpressions: IfExprColumnCondExpr(col 17:boolean, col 18:stringcol 26:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, ConstantVectorExpression(val Single) -> 18:string, IfExprColumnCondExpr(col 19:boolean, col 20:stringcol 25:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 19:boolean, ConstantVectorExpression(val Two) -> 20:string, IfExprColumnCondExpr(col 21:boolean, col 22:stringcol 24:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 21:boolean, ConstantVectorExpression(val Some) -> 22:string, IfExprStringScalarStringScalar(col 23:boolean, val Many, val Huge number)(children: LongColLessLongScalar(col 4:int, val 100) -> 23:boolean) -> 24:string) -> 25:string) -> 26:string) -> 27:string, IfExprColumnCondExpr(col 23:boolean, col 28:stringcol 37:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 23:boolean, ConstantVectorExpression(val Single) -> 28:string, IfExprColumnCondExpr(col 29:boolean, col 30:stringcol 36:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 29:boolean, ConstantVectorExpression(val Two) -> 30:string, IfExprColumnCondExpr(col 31:boolean, col 32:stringcol 35:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 31:boolean, ConstantVectorExpression(val Some) -> 32:string, IfExprColumnNull(col 33:boolean, col 34:string, null)(children: LongColLessLongScalar(col 4:int, val 100) -> 33:boolean, ConstantVectorExpression(val Many) -> 34:string) -> 35:string) -> 36:string) -> 37:string) -> 38:string, IfExprColumnCondExpr(col 39:boolean, col 40:stringcol 47:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 39:boolean, ConstantVectorExpression(val Single) -> 40:string, IfExprColumnCondExpr(col 41:boolean, col 42:stringcol 46:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 41:boolean, ConstantVectorExpression(val Two) -> 42:string, IfExprColumnCondExpr(col 43:boolean, col 44:stringcol 45:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 43:boolean, ConstantVectorExpression(val Some) -> 44:string, IfExprNullNull(null, null) -> 45:string) -> 46:string) -> 47:string) -> 48:string, IfExprCondExprCondExpr(col 49:boolean, col 50:datecol 51:date)(children: StringGroupColEqualCharScalar(col 14:char(10), val SHIP) -> 49:boolean, VectorUDFDateAddColScalar(col 10:date, val 10) -> 50:date, VectorUDFDateAddColScalar(col 10:date, val 5) -> 51:date) -> 52:date, IfExprDoubleColumnLongScalar(col 57:boolean, col 58:double, val 0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 57:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 54:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 54:double) -> 58:double) -> 54:double, IfExprCondExprColumn(col 57:boolean, col 59:double, col 58:double)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 57:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 58:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 58:double) -> 59:double, ConstantVectorExpression(val 0.0) -> 58:double) -> 60:double, IfExprNullColumn(col 62:boolean, null, col 82)(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 62:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) -> 82:decimal(10,2)) -> 63:decimal(10,2), IfExprColumnNull(col 64:boolean, col 83:decimal(10,2), null)(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 64:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) -> 83:decimal(10,2)) -> 65:decimal(10,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 67:decimal(12,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 68:decimal(12,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 69:decimal(10,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 70:decimal(10,2), IfExprCondExprCondExpr(col 66:boolean, col 71:timestampcol 72:timestamp)(children: LongColGreaterLongScalar(col 1:int, val 30) -> 66:boolean, CastDateToTimestamp(col 12:date) -> 71:timestamp, CastDateToTimestamp(col 11:date) -> 72:timestamp) -> 73:timestamp, IfExprCondExprNull(col 74:boolean, col 75:int, null)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 74:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 75:int) -> 76:int, IfExprNullCondExpr(col 77:boolean, null, col 78:int)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 77:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 78:int) -> 79:int, IfExprLongScalarLongScalar(col 81:boolean, val 14245, val 14609)(children: LongColGreaterLongScalar(col 80:int, val 100)(children: LongColModuloLongScalar(col 2:int, val 500) -> 80:int) -> 81:boolean) -> 80:date
 Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE
- Reduce Output Operator
- key expressions: _col0 (type: int)
- sort order: +
- Reduce Sink Vectorization:
- className: VectorReduceSinkOperator
+ File Output Operator
+ compressed: false
+ File Sink Vectorization:
+ className: VectorFileSinkOperator
 native: false
- nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
 Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: date), _col5 (type: double), _col6 (type: double), _col7 (type: decimal(10,2)), _col8 (type: decimal(10,2)), _col9 (type: decimal(12,2)), _col10 (type: decimal(12,2)), _col11 (type: decimal(10,2)), _col12 (type: decimal(10,2)), _col13 (type: timestamp), _col14 (type: int), _col15 (type: int), _col16 (type: date)
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 Execution mode: vectorized
 Map Vectorization:
 enabled: true
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFormatFeatureSupport: []
- featureSupportInUse: []
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
 allNative: false
 usesVectorUDFAdaptor: true
@@ -910,25 +870,9 @@ STAGE PLANS:
 rowBatchContext:
 dataColumnCount: 16
 includeColumns: [1, 2, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14]
- dataColumns: l_orderkey:int, l_partkey:int, l_suppkey:int, l_linenumber:int, l_quantity:int, l_extendedprice:double, l_discount:double, l_tax:decimal(10,2), l_returnflag:char(1), l_linestatus:char(1), l_shipdate:date, l_commitdate:date, l_receiptdate:date, l_shipinstruct:varchar(20), l_shipmode:char(10), l_comment:string
+ dataColumns: l_orderkey:int, l_partkey:int, l_suppkey:int, l_linenumber:int, l_quantity:int, l_extendedprice:double, l_discount:double, l_tax:decimal(10,2)/DECIMAL_64, l_returnflag:char(1), l_linestatus:char(1), l_shipdate:date, l_commitdate:date, l_receiptdate:date, l_shipinstruct:varchar(20), l_shipmode:char(10), l_comment:string
 partitionColumnCount: 0
- scratchColumnTypeNames: [bigint, string, bigint, string, bigint, string, bigint, string, string, string, string, string, bigint, string, bigint, string, bigint, string, string, string, string, string, bigint, string, bigint, string, bigint, string, string, string, string, string, bigint, bigint, bigint, bigint, bigint, double, double, bigint, bigint, double, double, double, string, bigint, decimal(10,2), bigint, decimal(10,2), bigint, decimal(12,2), decimal(12,2), decimal(10,2), decimal(10,2), timestamp, timestamp, timestamp, bigint, bigint, bigint, bigint, bigint, bigint, bigint, bigint]
- Reduce Vectorization:
- enabled: false
- enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true
- enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
- Reduce Operator Tree:
- Select Operator
- expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: date), VALUE._col4 (type: double), VALUE._col5 (type: double), VALUE._col6 (type: decimal(10,2)), VALUE._col7 (type: decimal(10,2)), VALUE._col8 (type: decimal(12,2)), VALUE._col9 (type: decimal(12,2)), VALUE._col10 (type: decimal(10,2)), VALUE._col11 (type: decimal(10,2)), VALUE._col12 (type: timestamp), VALUE._col13 (type: int), VALUE._col14 (type: int), VALUE._col15 (type: date)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16
- Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ scratchColumnTypeNames: [bigint, string, bigint, string, bigint, string, bigint, string, string, string, string, string, bigint, string, bigint, string, bigint, string, string, string, string, string, bigint, string, bigint, string, bigint, string, string, string, string, string, bigint, bigint, bigint, bigint, bigint, double, double, bigint, bigint, double, double, double, string, bigint, decimal(10,2), bigint, decimal(10,2), bigint, decimal(12,2), decimal(12,2), decimal(10,2), decimal(10,2), timestamp, timestamp, timestamp, bigint, bigint, bigint, bigint, bigint, bigint, bigint, bigint, decimal(10,2), decimal(10,2)]
 Stage: Stage-0
 Fetch Operator
@@ -975,7 +919,6 @@ PREHOOK: query: SELECT
 IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11,
 IF(L_SUPPKEY % 500 > 100, DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12
 FROM lineitem_test
-ORDER BY Quantity
 PREHOOK: type: QUERY
 PREHOOK: Input: default@lineitem_test
 #### A masked pattern was here ####
@@ -1018,109 +961,108 @@ POSTHOOK: query: SELECT
 IF(L_SUPPKEY > 10000, NULL, DATEDIFF(L_RECEIPTDATE, L_COMMITDATE)) AS Field_11,
 IF(L_SUPPKEY % 500 > 100, DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365)) AS Field_12
 FROM lineitem_test
-ORDER BY Quantity
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@lineitem_test
 #### A masked pattern was here ####
 quantity quantity_description quantity_description_2 quantity_description_3 expected_date field_1 field_2 field_3 field_4 field_5 field_6 field_7 field_8 field_9 field_10 field_11 field_12
-NULL Huge number NULL NULL NULL 0.0 0.0 NULL NULL NULL 0.00 NULL 0.00 NULL NULL NULL 2009-12-31
-1 Single Single Single 1994-12-06 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-12-15 00:00:00 NULL 3 2009-01-01
 1 Single Single Single 1994-01-31 0.0 0.0 0.05 0.05 0.05 0.05 0.05 0.05 1994-01-28 00:00:00 NULL -36 2009-01-01
-2 Two Two Two 1995-08-12 2011.3912000000003 2011.3912000000003 NULL NULL 0.00 0.00 0.00 0.00 1995-08-23 00:00:00 NULL -45 2009-01-01
-2 Two Two Two 1993-12-09 0.0 0.0 0.06 NULL 0.06 0.00 0.06 0.00 1994-01-01 00:00:00 NULL -6 2009-01-01
-3 Some Some Some 1994-06-11 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1994-06-15 00:00:00 NULL -42 2009-12-31
-3 Some Some Some 1998-06-02 5137.6143 5137.6143 0.07 NULL 0.07 0.00 0.07 0.00 1998-06-02 00:00:00 NULL 60 2009-01-01
-3 Some Some Some 1998-07-09 2778.921 2778.921 0.02 NULL 0.02 0.00 0.02 0.00 1998-07-21 00:00:00 NULL 46 2009-12-31
-4 Some Some Some 1995-08-09 5990.4936 5990.4936 0.03 NULL 0.03 0.00 0.03 0.00 1995-09-03 00:00:00 NULL -28 2009-01-01
-4 Some Some Some 1997-04-27 5669.7732000000005 5669.7732000000005 0.04 NULL 0.04 0.00 0.04 0.00 1997-04-20 00:00:00 NULL 79 2009-01-01
-5 Some Some Some 1997-02-25 8116.96 8116.96 NULL NULL 0.00 0.00 0.00 0.00 1997-02-21 00:00:00 NULL 9 2009-01-01
-5 Some Some Some 1996-02-15 6217.103999999999 6217.103999999999 0.02 NULL 0.02 0.00 0.02 0.00 1996-02-13 00:00:00 NULL -42 2009-01-01
-5 Some Some Some 1993-12-14 0.0 0.0 0.03 0.03 0.03 0.03 0.03 0.03 1993-12-23 00:00:00 NULL -2 2009-01-01
-6 Some Some Some 1998-11-04 9487.6152 9487.6152 0.06 NULL 0.06 0.00 0.06 0.00 1998-11-05 00:00:00 NULL 46 2009-12-31
-6 Some Some Some 1995-07-26 8793.2736 8793.2736 0.03 NULL 0.03 0.00 0.03 0.00 1995-07-25 00:00:00 NULL -60 2009-01-01
-7 Some Some Some 1996-01-24 12613.136199999999 12613.136199999999 0.04 NULL 0.04 0.00 0.04 0.00 1996-01-29 00:00:00 NULL 38 2009-01-01
-8 Some Some Some 1994-01-17 0.0 0.0 0.08 0.08 0.08 0.08 0.08 0.08 1994-01-14 00:00:00 NULL -44 2009-01-01
-8 Some Some Some 1996-02-03 11978.640000000001 11978.640000000001 0.02 0.02 0.02 0.02 0.02 0.02 1996-01-31 00:00:00 NULL -34 2009-01-01
-9 Some Some Some 1996-02-11 10666.6272 10666.6272 0.08 0.08 0.08 0.08 0.08 0.08 1996-02-19 00:00:00 NULL -12 2009-01-01
+1 Single Single Single 1994-12-06 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-12-15 00:00:00 NULL 3 2009-01-01
 11 Many Many NULL 1994-03-22 0.0 0.0 0.05 NULL 0.05 0.00 0.05 0.00 1994-03-27 00:00:00 NULL 10 2009-01-01
 12 Many Many NULL 1996-05-12 12655.998 12655.998 0.03 0.03 0.03 0.03 0.03 0.03 1996-06-03 00:00:00 NULL 82 2009-01-01
 12 Many Many NULL 1997-02-01 12156.034800000001 12156.034800000001 0.05 NULL
0.05 0.00 0.05 0.00 1997-02-22 00:00:00 NULL 1 2009-01-01 +13 Many Many NULL 1993-04-06 0.0 0.0 0.02 NULL 0.02 0.00 0.02 0.00 1993-04-08 00:00:00 NULL 4 2009-01-01 13 Many Many NULL 1994-03-08 0.0 0.0 0.06 NULL 0.06 0.00 0.06 0.00 1994-03-26 00:00:00 NULL 41 2009-01-01 13 Many Many NULL 1998-10-28 17554.68 17554.68 0.07 NULL 0.07 0.00 0.07 0.00 1998-11-06 00:00:00 NULL 53 2009-01-01 -13 Many Many NULL 1993-04-06 0.0 0.0 0.02 NULL 0.02 0.00 0.02 0.00 1993-04-08 00:00:00 NULL 4 2009-01-01 14 Many Many NULL 1995-01-04 0.0 0.0 0.02 NULL 0.02 0.00 0.02 0.00 1995-01-27 00:00:00 NULL 66 2009-01-01 15 Many Many NULL 1994-11-05 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1994-11-20 00:00:00 NULL 81 2009-12-31 -17 Many Many NULL 1996-03-18 20321.500799999998 20321.500799999998 NULL NULL 0.00 0.00 0.00 0.00 1996-03-22 00:00:00 NULL 39 2009-01-01 17 Many Many NULL 1994-07-07 0.0 0.0 0.00 0.00 0.00 0.00 0.00 0.00 1994-07-03 00:00:00 NULL -4 2009-01-01 +17 Many Many NULL 1996-03-18 20321.500799999998 20321.500799999998 NULL NULL 0.00 0.00 0.00 0.00 1996-03-22 00:00:00 NULL 39 2009-01-01 19 Many Many NULL 1993-05-19 0.0 0.0 0.08 0.08 0.08 0.08 0.08 0.08 1993-05-25 00:00:00 NULL 81 2009-01-01 19 Many Many NULL 1994-02-05 0.0 0.0 0.03 0.03 0.03 0.03 0.03 0.03 1994-02-06 00:00:00 NULL -11 2009-01-01 +2 Two Two Two 1993-12-09 0.0 0.0 0.06 NULL 0.06 0.00 0.06 0.00 1994-01-01 00:00:00 NULL -6 2009-01-01 +2 Two Two Two 1995-08-12 2011.3912000000003 2011.3912000000003 NULL NULL 0.00 0.00 0.00 0.00 1995-08-23 00:00:00 NULL -45 2009-01-01 20 Many Many NULL 1998-07-02 32042.592 32042.592 0.01 NULL 0.01 0.00 0.01 0.00 1998-07-02 00:00:00 NULL 40 2009-01-01 -21 Many Many NULL 1995-07-11 24640.0518 24640.0518 NULL NULL 0.00 0.00 0.00 0.00 1995-07-31 00:00:00 NULL 78 2009-01-01 21 Many Many NULL 1994-10-05 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-10-26 00:00:00 NULL 38 2009-01-01 -22 Many Many NULL 1998-10-14 28405.0184 28405.0184 0.06 NULL 0.06 0.00 0.06 0.00 1998-10-12 00:00:00 NULL -4 2009-01-01 +21 Many Many NULL 1995-07-11 24640.0518 24640.0518 NULL NULL 0.00 0.00 0.00 0.00 1995-07-31 00:00:00 NULL 78 2009-01-01 22 Many Many NULL 1995-07-22 39353.82 39353.82 0.05 NULL 0.05 0.00 0.05 0.00 1995-07-19 00:00:00 NULL 45 2009-01-01 -23 Many Many NULL 1997-04-24 33946.3785 33946.3785 NULL NULL 0.00 0.00 0.00 0.00 1997-05-06 00:00:00 NULL 81 2009-01-01 -23 Many Many NULL 1994-10-13 0.0 0.0 0.00 NULL 0.00 0.00 0.00 0.00 1994-10-24 00:00:00 NULL 79 2009-12-31 +22 Many Many NULL 1998-10-14 28405.0184 28405.0184 0.06 NULL 0.06 0.00 0.06 0.00 1998-10-12 00:00:00 NULL -4 2009-01-01 23 Many Many NULL 1994-07-24 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-07-25 00:00:00 NULL 26 2009-01-01 -24 Many Many NULL 1996-04-04 20542.032 20542.032 0.04 NULL 0.04 0.00 0.04 0.00 1996-04-01 00:00:00 NULL 18 2009-12-31 +23 Many Many NULL 1994-10-13 0.0 0.0 0.00 NULL 0.00 0.00 0.00 0.00 1994-10-24 00:00:00 NULL 79 2009-12-31 +23 Many Many NULL 1997-04-24 33946.3785 33946.3785 NULL NULL 0.00 0.00 0.00 0.00 1997-05-06 00:00:00 NULL 81 2009-01-01 24 Many Many NULL 1996-02-26 31762.584 31762.584 0.00 0.00 0.00 0.00 0.00 0.00 1996-03-18 00:00:00 NULL 75 2009-01-01 -25 Many Many NULL 1998-04-15 43064.1575 43064.1575 0.07 NULL 0.07 0.00 0.07 0.00 1998-04-11 00:00:00 NULL -11 2009-01-01 +24 Many Many NULL 1996-04-04 20542.032 20542.032 0.04 NULL 0.04 0.00 0.04 0.00 1996-04-01 00:00:00 NULL 18 2009-12-31 25 Many Many NULL 1995-12-06 27263.995 27263.995 NULL NULL 0.00 0.00 0.00 0.00 1995-12-21 00:00:00 NULL -4 2009-01-01 -26 Many Many NULL 1996-11-09 
39912.433600000004 39912.433600000004 0.04 NULL 0.04 0.00 0.04 0.00 1996-11-20 00:00:00 NULL 31 2009-01-01 -26 Many Many NULL 1995-04-25 0.0 0.0 0.03 NULL 0.03 0.00 0.03 0.00 1995-05-13 00:00:00 NULL 18 2009-01-01 -26 Many Many NULL 1994-10-21 0.0 0.0 0.08 NULL 0.08 0.00 0.08 0.00 1994-10-19 00:00:00 NULL 24 2009-01-01 +25 Many Many NULL 1998-04-15 43064.1575 43064.1575 0.07 NULL 0.07 0.00 0.07 0.00 1998-04-11 00:00:00 NULL -11 2009-01-01 26 Many Many NULL 1993-11-03 0.0 0.0 0.02 0.02 0.02 0.02 0.02 0.02 1993-11-04 00:00:00 NULL -44 2009-01-01 +26 Many Many NULL 1994-10-21 0.0 0.0 0.08 NULL 0.08 0.00 0.08 0.00 1994-10-19 00:00:00 NULL 24 2009-01-01 +26 Many Many NULL 1995-04-25 0.0 0.0 0.03 NULL 0.03 0.00 0.03 0.00 1995-05-13 00:00:00 NULL 18 2009-01-01 +26 Many Many NULL 1996-11-09 39912.433600000004 39912.433600000004 0.04 NULL 0.04 0.00 0.04 0.00 1996-11-20 00:00:00 NULL 31 2009-01-01 27 Many Many NULL 1994-01-26 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-01-23 00:00:00 NULL 62 2009-01-01 27 Many Many NULL 1998-06-29 45590.2425 45590.2425 NULL NULL 0.00 0.00 0.00 0.00 1998-06-29 00:00:00 NULL 4 2009-01-01 -28 Many Many NULL 1995-10-28 44866.219999999994 44866.219999999994 0.08 0.08 0.08 0.08 0.08 0.08 1995-10-26 00:00:00 NULL 60 2009-01-01 -28 Many Many NULL 1994-12-29 0.0 0.0 0.07 NULL 0.07 0.00 0.07 0.00 1995-01-16 00:00:00 NULL 83 2009-01-01 -28 Many Many NULL 1996-04-26 26349.6324 26349.6324 0.06 NULL 0.06 0.00 0.06 0.00 1996-05-16 00:00:00 NULL 47 2009-01-01 -28 Many Many NULL 1996-03-26 30855.6612 30855.6612 0.04 NULL 0.04 0.00 0.04 0.00 1996-04-20 00:00:00 NULL 12 2009-12-31 28 Many Many NULL 1993-12-19 0.0 0.0 0.00 0.00 0.00 0.00 0.00 0.00 1994-01-01 00:00:00 NULL -9 2009-01-01 +28 Many Many NULL 1994-12-29 0.0 0.0 0.07 NULL 0.07 0.00 0.07 0.00 1995-01-16 00:00:00 NULL 83 2009-01-01 +28 Many Many NULL 1995-10-28 44866.219999999994 44866.219999999994 0.08 0.08 0.08 0.08 0.08 0.08 1995-10-26 00:00:00 NULL 60 2009-01-01 28 Many Many NULL 1996-02-06 45975.3616 45975.3616 0.02 NULL 0.02 0.00 0.02 0.00 1996-02-28 00:00:00 NULL 66 2009-01-01 +28 Many Many NULL 1996-03-26 30855.6612 30855.6612 0.04 NULL 0.04 0.00 0.04 0.00 1996-04-20 00:00:00 NULL 12 2009-12-31 +28 Many Many NULL 1996-04-26 26349.6324 26349.6324 0.06 NULL 0.06 0.00 0.06 0.00 1996-05-16 00:00:00 NULL 47 2009-01-01 29 Many Many NULL 1997-01-30 39341.806 39341.806 NULL NULL 0.00 0.00 0.00 0.00 1997-01-27 00:00:00 NULL 0 2009-01-01 +3 Some Some Some 1994-06-11 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1994-06-15 00:00:00 NULL -42 2009-12-31 +3 Some Some Some 1998-06-02 5137.6143 5137.6143 0.07 NULL 0.07 0.00 0.07 0.00 1998-06-02 00:00:00 NULL 60 2009-01-01 +3 Some Some Some 1998-07-09 2778.921 2778.921 0.02 NULL 0.02 0.00 0.02 0.00 1998-07-21 00:00:00 NULL 46 2009-12-31 30 Many Many NULL 1994-06-08 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-06-22 00:00:00 NULL 24 2009-01-01 30 Many Many NULL 1996-01-15 29770.173 29770.173 NULL NULL 0.00 0.00 0.00 0.00 1996-01-18 00:00:00 NULL 35 2009-12-31 30 Many Many NULL 1998-08-16 44561.46 44561.46 0.06 NULL 0.06 0.00 0.06 0.00 1998-08-14 00:00:00 NULL 34 2009-12-31 -31 Many Many NULL 1994-02-24 0.0 0.0 0.08 0.08 0.08 0.08 0.08 0.08 1994-02-20 00:00:00 NULL -19 2009-01-01 31 Many Many NULL 1993-11-03 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1993-11-08 00:00:00 NULL -41 2009-01-01 -32 Many Many NULL 1995-08-19 63313.3312 63313.3312 0.00 NULL 0.00 0.00 0.00 0.00 1995-08-27 00:00:00 NULL -41 2009-01-01 +31 Many Many NULL 1994-02-24 0.0 0.0 0.08 0.08 0.08 0.08 0.08 0.08 1994-02-20 00:00:00 NULL 
-19 2009-01-01 32 Many Many NULL 1993-12-14 0.0 0.0 0.05 NULL 0.05 0.00 0.05 0.00 1993-12-28 00:00:00 NULL -7 2009-12-31 32 Many Many NULL 1994-08-29 0.0 0.0 0.06 NULL 0.06 0.00 0.06 0.00 1994-08-31 00:00:00 NULL 14 2009-01-01 -32 Many Many NULL 1996-10-07 44955.15839999999 44955.15839999999 0.05 NULL 0.05 0.00 0.05 0.00 1996-10-14 00:00:00 NULL -66 2009-12-31 +32 Many Many NULL 1995-08-19 63313.3312 63313.3312 0.00 NULL 0.00 0.00 0.00 0.00 1995-08-27 00:00:00 NULL -41 2009-01-01 32 Many Many NULL 1996-02-04 46146.7488 46146.7488 NULL NULL 0.00 0.00 0.00 0.00 1996-02-03 00:00:00 NULL -4 2009-01-01 +32 Many Many NULL 1996-10-07 44955.15839999999 44955.15839999999 0.05 NULL 0.05 0.00 0.05 0.00 1996-10-14 00:00:00 NULL -66 2009-12-31 33 Many Many NULL 1998-04-17 54174.12 54174.12 0.01 NULL 0.01 0.00 0.01 0.00 1998-04-15 00:00:00 NULL 26 2009-01-01 -34 Many Many NULL 1998-03-10 56487.763199999994 56487.763199999994 NULL NULL 0.00 0.00 0.00 0.00 1998-03-30 00:00:00 NULL -23 2009-01-01 -34 Many Many NULL 1996-01-27 63982.002400000005 63982.002400000005 NULL NULL 0.00 0.00 0.00 0.00 1996-01-27 00:00:00 NULL 21 2009-01-01 34 Many Many NULL 1995-11-13 60586.5448 60586.5448 0.06 NULL 0.06 0.00 0.06 0.00 1995-11-26 00:00:00 NULL -50 2009-01-01 +34 Many Many NULL 1996-01-27 63982.002400000005 63982.002400000005 NULL NULL 0.00 0.00 0.00 0.00 1996-01-27 00:00:00 NULL 21 2009-01-01 +34 Many Many NULL 1998-03-10 56487.763199999994 56487.763199999994 NULL NULL 0.00 0.00 0.00 0.00 1998-03-30 00:00:00 NULL -23 2009-01-01 35 Many Many NULL 1996-01-21 40475.225 40475.225 0.03 0.03 0.03 0.03 0.03 0.03 1996-01-22 00:00:00 NULL -32 2009-01-01 36 Many Many NULL 1996-04-17 41844.6756 41844.6756 0.06 0.06 0.06 0.06 0.06 0.06 1996-04-20 00:00:00 NULL 52 2009-01-01 -37 Many Many NULL 1994-02-18 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1994-02-21 00:00:00 NULL -23 2009-01-01 -37 Many Many NULL 1993-04-23 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1993-04-14 00:00:00 NULL 15 2009-12-31 37 Many Many NULL 1992-05-02 0.0 0.0 0.03 0.03 0.03 0.03 0.03 0.03 1992-05-02 00:00:00 NULL -13 2009-01-01 -38 Many Many NULL 1997-02-02 44694.46 44694.46 0.05 0.05 0.05 0.05 0.05 0.05 1997-02-02 00:00:00 NULL 19 2009-01-01 +37 Many Many NULL 1993-04-23 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1993-04-14 00:00:00 NULL 15 2009-12-31 +37 Many Many NULL 1994-02-18 0.0 0.0 0.04 NULL 0.04 0.00 0.04 0.00 1994-02-21 00:00:00 NULL -23 2009-01-01 38 Many Many NULL 1996-02-16 68028.3144 68028.3144 NULL NULL 0.00 0.00 0.00 0.00 1996-02-18 00:00:00 NULL -6 2009-01-01 -39 Many Many NULL 1998-02-03 45146.01 45146.01 NULL NULL 0.00 0.00 0.00 0.00 1998-02-18 00:00:00 NULL -48 2009-01-01 +38 Many Many NULL 1997-02-02 44694.46 44694.46 0.05 0.05 0.05 0.05 0.05 0.05 1997-02-02 00:00:00 NULL 19 2009-01-01 39 Many Many NULL 1992-07-07 0.0 0.0 0.02 0.02 0.02 0.02 0.02 0.02 1992-07-28 00:00:00 NULL -21 2009-01-01 -40 Many Many NULL 1996-12-13 51224.736 51224.736 0.05 NULL 0.05 0.00 0.05 0.00 1997-01-01 00:00:00 NULL 71 2009-01-01 +39 Many Many NULL 1998-02-03 45146.01 45146.01 NULL NULL 0.00 0.00 0.00 0.00 1998-02-18 00:00:00 NULL -48 2009-01-01 +4 Some Some Some 1995-08-09 5990.4936 5990.4936 0.03 NULL 0.03 0.00 0.03 0.00 1995-09-03 00:00:00 NULL -28 2009-01-01 +4 Some Some Some 1997-04-27 5669.7732000000005 5669.7732000000005 0.04 NULL 0.04 0.00 0.04 0.00 1997-04-20 00:00:00 NULL 79 2009-01-01 40 Many Many NULL 1992-07-26 0.0 0.0 0.03 NULL 0.03 0.00 0.03 0.00 1992-08-15 00:00:00 NULL 14 2009-01-01 -41 Many Many NULL 1998-07-04 47989.6144 47989.6144 0.08 NULL 0.08 0.00 
0.08 0.00 1998-07-06 00:00:00 NULL 9 2009-01-01 -41 Many Many NULL 1994-02-26 0.0 0.0 0.07 NULL 0.07 0.00 0.07 0.00 1994-03-18 00:00:00 NULL 17 2009-01-01 +40 Many Many NULL 1996-12-13 51224.736 51224.736 0.05 NULL 0.05 0.00 0.05 0.00 1997-01-01 00:00:00 NULL 71 2009-01-01 41 Many Many NULL 1993-11-14 0.0 0.0 0.00 0.00 0.00 0.00 0.00 0.00 1993-11-11 00:00:00 NULL -74 2009-01-01 +41 Many Many NULL 1994-02-26 0.0 0.0 0.07 NULL 0.07 0.00 0.07 0.00 1994-03-18 00:00:00 NULL 17 2009-01-01 +41 Many Many NULL 1998-07-04 47989.6144 47989.6144 0.08 NULL 0.08 0.00 0.08 0.00 1998-07-06 00:00:00 NULL 9 2009-01-01 42 Many Many NULL 1994-08-05 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-08-28 00:00:00 NULL 33 2009-12-31 42 Many Many NULL 1996-02-13 68289.9672 68289.9672 0.00 NULL 0.00 0.00 0.00 0.00 1996-02-23 00:00:00 NULL 33 2009-01-01 -43 Many Many NULL 1996-10-22 62727.3207 62727.3207 0.01 NULL 0.01 0.00 0.01 0.00 1996-10-26 00:00:00 NULL -19 2009-12-31 43 Many Many NULL 1992-07-15 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1992-08-02 00:00:00 NULL 27 2009-01-01 -44 Many Many NULL 1997-03-23 60781.124800000005 60781.124800000005 NULL NULL 0.00 0.00 0.00 0.00 1997-04-13 00:00:00 NULL 74 2009-12-31 -44 Many Many NULL 1996-10-04 80882.4192 80882.4192 0.02 NULL 0.02 0.00 0.02 0.00 1996-09-30 00:00:00 NULL -48 2009-01-01 +43 Many Many NULL 1996-10-22 62727.3207 62727.3207 0.01 NULL 0.01 0.00 0.01 0.00 1996-10-26 00:00:00 NULL -19 2009-12-31 44 Many Many NULL 1995-09-02 75106.658 75106.658 NULL NULL 0.00 0.00 0.00 0.00 1995-09-14 00:00:00 NULL 25 2009-01-01 +44 Many Many NULL 1996-10-04 80882.4192 80882.4192 0.02 NULL 0.02 0.00 0.02 0.00 1996-09-30 00:00:00 NULL -48 2009-01-01 44 Many Many NULL 1996-11-19 48941.692800000004 48941.692800000004 0.06 NULL 0.06 0.00 0.06 0.00 1996-12-12 00:00:00 NULL -3 2009-01-01 -45 Many Many NULL 1998-03-05 61489.35 61489.35 NULL NULL 0.00 0.00 0.00 0.00 1998-03-24 00:00:00 NULL 4 2009-01-01 +44 Many Many NULL 1997-03-23 60781.124800000005 60781.124800000005 NULL NULL 0.00 0.00 0.00 0.00 1997-04-13 00:00:00 NULL 74 2009-12-31 45 Many Many NULL 1994-02-07 0.0 0.0 0.00 NULL 0.00 0.00 0.00 0.00 1994-02-23 00:00:00 NULL 50 2009-01-01 +45 Many Many NULL 1998-03-05 61489.35 61489.35 NULL NULL 0.00 0.00 0.00 0.00 1998-03-24 00:00:00 NULL 4 2009-01-01 46 Many Many NULL 1996-01-20 73475.892 73475.892 0.07 NULL 0.07 0.00 0.07 0.00 1996-02-03 00:00:00 NULL -53 2009-01-01 +46 Many Many NULL 1996-10-01 77781.4092 77781.4092 NULL NULL 0.00 0.00 0.00 0.00 1996-10-26 00:00:00 NULL -54 2009-01-01 46 Many Many NULL 1998-07-01 56583.5144 56583.5144 0.05 NULL 0.05 0.00 0.05 0.00 1998-07-05 00:00:00 NULL 28 2009-01-01 46 Many Many NULL 1998-08-18 84565.5168 84565.5168 0.05 NULL 0.05 0.00 0.05 0.00 1998-08-29 00:00:00 NULL 52 2009-01-01 -46 Many Many NULL 1996-10-01 77781.4092 77781.4092 NULL NULL 0.00 0.00 0.00 0.00 1996-10-26 00:00:00 NULL -54 2009-01-01 48 Many Many NULL 1994-08-22 0.0 0.0 0.07 NULL 0.07 0.00 0.07 0.00 1994-09-08 00:00:00 NULL 28 2009-01-01 49 Many Many NULL 1993-11-14 0.0 0.0 0.00 0.00 0.00 0.00 0.00 0.00 1993-11-24 00:00:00 NULL -26 2009-12-31 +5 Some Some Some 1993-12-14 0.0 0.0 0.03 0.03 0.03 0.03 0.03 0.03 1993-12-23 00:00:00 NULL -2 2009-01-01 +5 Some Some Some 1996-02-15 6217.103999999999 6217.103999999999 0.02 NULL 0.02 0.00 0.02 0.00 1996-02-13 00:00:00 NULL -42 2009-01-01 +5 Some Some Some 1997-02-25 8116.96 8116.96 NULL NULL 0.00 0.00 0.00 0.00 1997-02-21 00:00:00 NULL 9 2009-01-01 50 Many Many NULL 1994-08-13 0.0 0.0 NULL NULL 0.00 0.00 0.00 0.00 1994-08-26 00:00:00 
NULL -48 2009-12-31 +6 Some Some Some 1995-07-26 8793.2736 8793.2736 0.03 NULL 0.03 0.00 0.03 0.00 1995-07-25 00:00:00 NULL -60 2009-01-01 +6 Some Some Some 1998-11-04 9487.6152 9487.6152 0.06 NULL 0.06 0.00 0.06 0.00 1998-11-05 00:00:00 NULL 46 2009-12-31 +7 Some Some Some 1996-01-24 12613.136199999999 12613.136199999999 0.04 NULL 0.04 0.00 0.04 0.00 1996-01-29 00:00:00 NULL 38 2009-01-01 +8 Some Some Some 1994-01-17 0.0 0.0 0.08 0.08 0.08 0.08 0.08 0.08 1994-01-14 00:00:00 NULL -44 2009-01-01 +8 Some Some Some 1996-02-03 11978.640000000001 11978.640000000001 0.02 0.02 0.02 0.02 0.02 0.02 1996-01-31 00:00:00 NULL -34 2009-01-01 +9 Some Some Some 1996-02-11 10666.6272 10666.6272 0.08 0.08 0.08 0.08 0.08 0.08 1996-02-19 00:00:00 NULL -12 2009-01-01 +NULL Huge number NULL NULL NULL 0.0 0.0 NULL NULL NULL 0.00 NULL 0.00 NULL NULL NULL 2009-12-31
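The repeated change from inputFormatFeatureSupport: [] to [DECIMAL_64] in these golden files records the point of the patch: the ORC input format now advertises DECIMAL_64 support, so decimals with 18 or fewer digits are carried through the vectorized pipeline as scaled longs in a Decimal64ColumnVector instead of as HiveDecimalWritable objects in a DecimalColumnVector. A minimal sketch of that representation (assuming the storage-api constructor Decimal64ColumnVector(size, precision, scale); the sample values are made up):

    import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector;

    public class Decimal64VectorSketch {
      public static void main(String[] args) {
        // A decimal(10,2) column: each entry is the unscaled value,
        // i.e. the decimal multiplied by 10^scale, stored in a plain long.
        Decimal64ColumnVector col = new Decimal64ColumnVector(1024, 10, 2);
        col.vector[0] = 550L;    // encodes 5.50
        col.vector[1] = -1255L;  // encodes -12.55
        // Operators can then use cheap long arithmetic; values are only
        // widened back to HiveDecimal (the ConvertDecimal64ToDecimal steps
        // in the plans above and below) when an expression has no
        // decimal64 implementation.
        System.out.println(col.vector[0] + col.vector[1]); // -705, i.e. -7.05 at scale 2
      }
    }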
diff --git a/ql/src/test/results/clientpositive/vector_case_when_2.q.out b/ql/src/test/results/clientpositive/vector_case_when_2.q.out index b8a5214..76c7f3d 100644 --- a/ql/src/test/results/clientpositive/vector_case_when_2.q.out +++ b/ql/src/test/results/clientpositive/vector_case_when_2.q.out @@ -392,8 +392,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -651,8 +651,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true diff --git a/ql/src/test/results/clientpositive/vector_cast_constant.q.out b/ql/src/test/results/clientpositive/vector_cast_constant.q.out index d8a534f..8c596a6 100644 --- a/ql/src/test/results/clientpositive/vector_cast_constant.q.out +++ b/ql/src/test/results/clientpositive/vector_cast_constant.q.out @@ -165,8 +165,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_char_2.q.out b/ql/src/test/results/clientpositive/vector_char_2.q.out index 97038ee..dc2c1e4 100644 --- a/ql/src/test/results/clientpositive/vector_char_2.q.out +++ b/ql/src/test/results/clientpositive/vector_char_2.q.out @@ -126,8 +126,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -314,8 +314,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_char_4.q.out b/ql/src/test/results/clientpositive/vector_char_4.q.out index 5b9f272..8d27537 100644 --- a/ql/src/test/results/clientpositive/vector_char_4.q.out +++ b/ql/src/test/results/clientpositive/vector_char_4.q.out @@ -174,8 +174,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_char_mapjoin1.q.out b/ql/src/test/results/clientpositive/vector_char_mapjoin1.q.out index c98bb44..25199b8 100644 --- a/ql/src/test/results/clientpositive/vector_char_mapjoin1.q.out +++ b/ql/src/test/results/clientpositive/vector_char_mapjoin1.q.out @@ -213,8 +213,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -346,8 +346,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -481,8 +481,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_char_simple.q.out b/ql/src/test/results/clientpositive/vector_char_simple.q.out index 43c3e48..a845c47 100644 --- a/ql/src/test/results/clientpositive/vector_char_simple.q.out +++ b/ql/src/test/results/clientpositive/vector_char_simple.q.out @@ -70,8 +70,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -147,8 +147,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -236,8 +236,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport:
[DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_coalesce.q.out b/ql/src/test/results/clientpositive/vector_coalesce.q.out index d1b12e6..0d20a0e 100644 --- a/ql/src/test/results/clientpositive/vector_coalesce.q.out +++ b/ql/src/test/results/clientpositive/vector_coalesce.q.out @@ -42,8 +42,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -127,8 +127,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -213,8 +213,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -293,8 +293,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -379,8 +379,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -458,8 +458,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_coalesce_2.q.out b/ql/src/test/results/clientpositive/vector_coalesce_2.q.out index c42d295..6030750 100644 --- a/ql/src/test/results/clientpositive/vector_coalesce_2.q.out +++ b/ql/src/test/results/clientpositive/vector_coalesce_2.q.out @@ -93,8 +93,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -193,8 +193,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -288,8 +288,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -388,8 +388,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_coalesce_3.q.out b/ql/src/test/results/clientpositive/vector_coalesce_3.q.out index 6b7e21b..884078d 100644 --- a/ql/src/test/results/clientpositive/vector_coalesce_3.q.out +++ b/ql/src/test/results/clientpositive/vector_coalesce_3.q.out @@ -145,8 +145,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_coalesce_4.q.out b/ql/src/test/results/clientpositive/vector_coalesce_4.q.out index f359b22..c7c0da6 100644 --- a/ql/src/test/results/clientpositive/vector_coalesce_4.q.out +++ b/ql/src/test/results/clientpositive/vector_coalesce_4.q.out @@ -73,8 +73,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_count.q.out b/ql/src/test/results/clientpositive/vector_count.q.out index 85d5926..10a144a 100644 --- a/ql/src/test/results/clientpositive/vector_count.q.out +++ b/ql/src/test/results/clientpositive/vector_count.q.out @@ -242,8 +242,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -327,8 +327,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_data_types.q.out 
b/ql/src/test/results/clientpositive/vector_data_types.q.out index b72340d..dad2abd 100644 --- a/ql/src/test/results/clientpositive/vector_data_types.q.out +++ b/ql/src/test/results/clientpositive/vector_data_types.q.out @@ -244,8 +244,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -363,8 +363,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true diff --git a/ql/src/test/results/clientpositive/vector_date_1.q.out b/ql/src/test/results/clientpositive/vector_date_1.q.out index affd786..c2e9e5d 100644 --- a/ql/src/test/results/clientpositive/vector_date_1.q.out +++ b/ql/src/test/results/clientpositive/vector_date_1.q.out @@ -131,8 +131,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -270,8 +270,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -409,8 +409,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -548,8 +548,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -691,8 +691,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -836,8 +836,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat 
allNative: false usesVectorUDFAdaptor: false @@ -943,8 +943,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_decimal_1.q.out b/ql/src/test/results/clientpositive/vector_decimal_1.q.out index 80def64..ee6895b 100644 --- a/ql/src/test/results/clientpositive/vector_decimal_1.q.out +++ b/ql/src/test/results/clientpositive/vector_decimal_1.q.out @@ -66,15 +66,15 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 336 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(4,2), 1:u:decimal(5,0), 2:v:decimal(10,0), 3:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(4,2)/DECIMAL_64, 1:u:decimal(5,0)/DECIMAL_64, 2:v:decimal(10,0)/DECIMAL_64, 3:ROW__ID:struct] Select Operator expressions: UDFToBoolean(t) (type: boolean) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [4] - selectExpressions: CastDecimalToBoolean(col 0:decimal(4,2)) -> 4:boolean + projectedOutputColumnNums: [5] + selectExpressions: CastDecimalToBoolean(col 4:decimal(4,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(4,2)/DECIMAL_64) -> 4:decimal(4,2)) -> 5:boolean Statistics: Num rows: 2 Data size: 336 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: boolean) @@ -89,8 +89,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -98,9 +98,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 3 includeColumns: [0] - dataColumns: t:decimal(4,2), u:decimal(5,0), v:decimal(10,0) + dataColumns: t:decimal(4,2)/DECIMAL_64, u:decimal(5,0)/DECIMAL_64, v:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [bigint] + scratchColumnTypeNames: [decimal(4,2), bigint] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -157,15 +157,15 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 336 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(4,2), 1:u:decimal(5,0), 2:v:decimal(10,0), 3:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(4,2)/DECIMAL_64, 1:u:decimal(5,0)/DECIMAL_64, 2:v:decimal(10,0)/DECIMAL_64, 3:ROW__ID:struct] Select Operator expressions: UDFToByte(t) (type: tinyint) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [4] - selectExpressions: CastDecimalToLong(col 0:decimal(4,2)) -> 4:tinyint + projectedOutputColumnNums: [5] + selectExpressions: CastDecimalToLong(col 4:decimal(4,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(4,2)/DECIMAL_64) -> 4:decimal(4,2)) -> 5:tinyint Statistics: Num rows: 2 Data size: 336 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: tinyint) @@ -180,8 +180,8 
@@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -189,9 +189,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 3 includeColumns: [0] - dataColumns: t:decimal(4,2), u:decimal(5,0), v:decimal(10,0) + dataColumns: t:decimal(4,2)/DECIMAL_64, u:decimal(5,0)/DECIMAL_64, v:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [bigint] + scratchColumnTypeNames: [decimal(4,2), bigint] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -248,15 +248,15 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 336 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(4,2), 1:u:decimal(5,0), 2:v:decimal(10,0), 3:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(4,2)/DECIMAL_64, 1:u:decimal(5,0)/DECIMAL_64, 2:v:decimal(10,0)/DECIMAL_64, 3:ROW__ID:struct] Select Operator expressions: UDFToShort(t) (type: smallint) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [4] - selectExpressions: CastDecimalToLong(col 0:decimal(4,2)) -> 4:smallint + projectedOutputColumnNums: [5] + selectExpressions: CastDecimalToLong(col 4:decimal(4,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(4,2)/DECIMAL_64) -> 4:decimal(4,2)) -> 5:smallint Statistics: Num rows: 2 Data size: 336 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: smallint) @@ -271,8 +271,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -280,9 +280,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 3 includeColumns: [0] - dataColumns: t:decimal(4,2), u:decimal(5,0), v:decimal(10,0) + dataColumns: t:decimal(4,2)/DECIMAL_64, u:decimal(5,0)/DECIMAL_64, v:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [bigint] + scratchColumnTypeNames: [decimal(4,2), bigint] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -339,15 +339,15 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 336 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(4,2), 1:u:decimal(5,0), 2:v:decimal(10,0), 3:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(4,2)/DECIMAL_64, 1:u:decimal(5,0)/DECIMAL_64, 2:v:decimal(10,0)/DECIMAL_64, 3:ROW__ID:struct] Select Operator expressions: UDFToInteger(t) (type: int) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [4] - selectExpressions: CastDecimalToLong(col 0:decimal(4,2)) -> 4:int + projectedOutputColumnNums: [5] + selectExpressions: CastDecimalToLong(col 4:decimal(4,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(4,2)/DECIMAL_64) -> 4:decimal(4,2)) -> 5:int Statistics: Num rows: 2 Data size: 336 Basic stats: 
COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) @@ -362,8 +362,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -371,9 +371,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 3 includeColumns: [0] - dataColumns: t:decimal(4,2), u:decimal(5,0), v:decimal(10,0) + dataColumns: t:decimal(4,2)/DECIMAL_64, u:decimal(5,0)/DECIMAL_64, v:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [bigint] + scratchColumnTypeNames: [decimal(4,2), bigint] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -430,15 +430,15 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 336 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(4,2), 1:u:decimal(5,0), 2:v:decimal(10,0), 3:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(4,2)/DECIMAL_64, 1:u:decimal(5,0)/DECIMAL_64, 2:v:decimal(10,0)/DECIMAL_64, 3:ROW__ID:struct] Select Operator expressions: UDFToLong(t) (type: bigint) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [4] - selectExpressions: CastDecimalToLong(col 0:decimal(4,2)) -> 4:bigint + projectedOutputColumnNums: [5] + selectExpressions: CastDecimalToLong(col 4:decimal(4,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(4,2)/DECIMAL_64) -> 4:decimal(4,2)) -> 5:bigint Statistics: Num rows: 2 Data size: 336 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: bigint) @@ -453,8 +453,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -462,9 +462,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 3 includeColumns: [0] - dataColumns: t:decimal(4,2), u:decimal(5,0), v:decimal(10,0) + dataColumns: t:decimal(4,2)/DECIMAL_64, u:decimal(5,0)/DECIMAL_64, v:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [bigint] + scratchColumnTypeNames: [decimal(4,2), bigint] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -521,15 +521,15 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 336 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(4,2), 1:u:decimal(5,0), 2:v:decimal(10,0), 3:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(4,2)/DECIMAL_64, 1:u:decimal(5,0)/DECIMAL_64, 2:v:decimal(10,0)/DECIMAL_64, 3:ROW__ID:struct] Select Operator expressions: UDFToFloat(t) (type: float) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [4] - selectExpressions: CastDecimalToFloat(col 0:decimal(4,2)) -> 4:float + projectedOutputColumnNums: [5] + selectExpressions: CastDecimalToFloat(col 4:decimal(4,2))(children: ConvertDecimal64ToDecimal(col 
0:decimal(4,2)/DECIMAL_64) -> 4:decimal(4,2)) -> 5:float Statistics: Num rows: 2 Data size: 336 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: float) @@ -544,8 +544,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -553,9 +553,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 3 includeColumns: [0] - dataColumns: t:decimal(4,2), u:decimal(5,0), v:decimal(10,0) + dataColumns: t:decimal(4,2)/DECIMAL_64, u:decimal(5,0)/DECIMAL_64, v:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [double] + scratchColumnTypeNames: [decimal(4,2), double] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -612,15 +612,15 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 336 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(4,2), 1:u:decimal(5,0), 2:v:decimal(10,0), 3:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(4,2)/DECIMAL_64, 1:u:decimal(5,0)/DECIMAL_64, 2:v:decimal(10,0)/DECIMAL_64, 3:ROW__ID:struct] Select Operator expressions: UDFToDouble(t) (type: double) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [4] - selectExpressions: CastDecimalToDouble(col 0:decimal(4,2)) -> 4:double + projectedOutputColumnNums: [5] + selectExpressions: CastDecimalToDouble(col 4:decimal(4,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(4,2)/DECIMAL_64) -> 4:decimal(4,2)) -> 5:double Statistics: Num rows: 2 Data size: 336 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double) @@ -635,8 +635,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -644,9 +644,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 3 includeColumns: [0] - dataColumns: t:decimal(4,2), u:decimal(5,0), v:decimal(10,0) + dataColumns: t:decimal(4,2)/DECIMAL_64, u:decimal(5,0)/DECIMAL_64, v:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [double] + scratchColumnTypeNames: [decimal(4,2), double] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -703,15 +703,15 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 336 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(4,2), 1:u:decimal(5,0), 2:v:decimal(10,0), 3:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(4,2)/DECIMAL_64, 1:u:decimal(5,0)/DECIMAL_64, 2:v:decimal(10,0)/DECIMAL_64, 3:ROW__ID:struct] Select Operator expressions: UDFToString(t) (type: string) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [4] - selectExpressions: CastDecimalToString(col 0:decimal(4,2)) -> 4:string + 
projectedOutputColumnNums: [5] + selectExpressions: CastDecimalToString(col 4:decimal(4,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(4,2)/DECIMAL_64) -> 4:decimal(4,2)) -> 5:string Statistics: Num rows: 2 Data size: 336 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) @@ -726,8 +726,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -735,9 +735,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 3 includeColumns: [0] - dataColumns: t:decimal(4,2), u:decimal(5,0), v:decimal(10,0) + dataColumns: t:decimal(4,2)/DECIMAL_64, u:decimal(5,0)/DECIMAL_64, v:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [string] + scratchColumnTypeNames: [decimal(4,2), string] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -794,15 +794,15 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 336 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:decimal(4,2), 1:u:decimal(5,0), 2:v:decimal(10,0), 3:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:decimal(4,2)/DECIMAL_64, 1:u:decimal(5,0)/DECIMAL_64, 2:v:decimal(10,0)/DECIMAL_64, 3:ROW__ID:struct] Select Operator expressions: CAST( t AS TIMESTAMP) (type: timestamp) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [4] - selectExpressions: CastDecimalToTimestamp(col 0:decimal(4,2)) -> 4:timestamp + projectedOutputColumnNums: [5] + selectExpressions: CastDecimalToTimestamp(col 4:decimal(4,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(4,2)/DECIMAL_64) -> 4:decimal(4,2)) -> 5:timestamp Statistics: Num rows: 2 Data size: 336 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: timestamp) @@ -817,8 +817,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -826,9 +826,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 3 includeColumns: [0] - dataColumns: t:decimal(4,2), u:decimal(5,0), v:decimal(10,0) + dataColumns: t:decimal(4,2)/DECIMAL_64, u:decimal(5,0)/DECIMAL_64, v:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [timestamp] + scratchColumnTypeNames: [decimal(4,2), timestamp] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true diff --git a/ql/src/test/results/clientpositive/vector_decimal_10_0.q.out b/ql/src/test/results/clientpositive/vector_decimal_10_0.q.out index acb62df..6526abe 100644 --- a/ql/src/test/results/clientpositive/vector_decimal_10_0.q.out +++ b/ql/src/test/results/clientpositive/vector_decimal_10_0.q.out @@ -56,7 +56,7 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 112 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:dec:decimal(10,0), 
1:ROW__ID:struct] + vectorizationSchemaColumns: [0:dec:decimal(10,0)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: dec (type: decimal(10,0)) outputColumnNames: _col0 @@ -78,8 +78,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -87,7 +87,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: dec:decimal(10,0) + dataColumns: dec:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [] Reduce Vectorization: diff --git a/ql/src/test/results/clientpositive/vector_decimal_5.q.out b/ql/src/test/results/clientpositive/vector_decimal_5.q.out index 0bfd12e..f3e6b72 100644 --- a/ql/src/test/results/clientpositive/vector_decimal_5.q.out +++ b/ql/src/test/results/clientpositive/vector_decimal_5.q.out @@ -129,6 +129,40 @@ NULL 124.00000 125.20000 200.00000 +PREHOOK: query: explain SELECT cast(key as decimal) FROM DECIMAL_5 +PREHOOK: type: QUERY +POSTHOOK: query: explain SELECT cast(key as decimal) FROM DECIMAL_5 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_5 + Statistics: Num rows: 38 Data size: 4072 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: CAST( key AS decimal(10,0)) (type: decimal(10,0)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4072 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4072 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + PREHOOK: query: SELECT cast(key as decimal) FROM DECIMAL_5 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_5 @@ -137,42 +171,42 @@ POSTHOOK: query: SELECT cast(key as decimal) FROM DECIMAL_5 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_5 #### A masked pattern was here #### --4400 +-440000000 NULL 0 0 -100 -10 -1 -0 -0 -200 -20 -2 -0 -0 -0 +10000000 +1000000 +100000 +10000 +1000 +20000000 +2000000 +200000 0 -0 -0 -0 -0 -0 -1 -2 -3 --1 --1 --1 -1 -1 -124 -125 --1255 -3 -3 -3 -1 +20000 +2000 +30000 +33000 +33300 +-30000 +-33000 +-33300 +100000 +200000 +314000 +-112000 +-112000 +-112200 +112000 +112200 +12400000 +12520000 +-125549000 +314000 +314000 +314000 +100000 NULL NULL PREHOOK: query: SELECT cast(key as decimal(6,3)) FROM DECIMAL_5 @@ -187,38 +221,38 @@ NULL NULL 0.000 0.000 +NULL +NULL 100.000 10.000 1.000 -0.100 -0.010 +NULL +NULL 200.000 -20.000 -2.000 0.000 -0.200 -0.020 -0.300 -0.330 -0.333 --0.300 --0.330 --0.333 -1.000 +20.000 2.000 -3.140 --1.120 --1.120 --1.122 -1.120 -1.122 -124.000 -125.200 -NULL -3.140 -3.140 -3.140 -1.000 +30.000 +33.000 +33.300 +-30.000 +-33.000 +-33.300 +100.000 +200.000 +314.000 +-112.000 +-112.000 +-112.200 +112.000 +112.200 +NULL +NULL +NULL +314.000 +314.000 +314.000 +100.000 NULL NULL PREHOOK: query: DROP TABLE DECIMAL_5_txt
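The rewritten expected output above traces back to that scaled-long encoding: DECIMAL_5's key is a decimal(10,5), and each new value appears to be the key's raw unscaled long at scale 5 rather than the downscaled cast result (-4400 becomes -440000000, 124 becomes 12400000, 3.14 surfaces as 314000). A small worked example of the encode/decode arithmetic in plain Java (class and variable names are illustrative):

    import java.math.BigDecimal;

    public class ScaledLongSketch {
      public static void main(String[] args) {
        int scale = 5;                 // scale of decimal(10,5)
        long unscaled = -440000000L;   // raw DECIMAL_64 cell contents
        // Decoding divides by 10^scale:
        BigDecimal decoded = BigDecimal.valueOf(unscaled, scale);
        System.out.println(decoded);   // -4400.00000
        // Encoding multiplies by 10^scale and must fit in 18 digits:
        long back = decoded.movePointRight(scale).longValueExact();
        System.out.println(back);      // -440000000
      }
    }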
a/ql/src/test/results/clientpositive/vector_decimal_6.q.out b/ql/src/test/results/clientpositive/vector_decimal_6.q.out index 2bc955a..445896b 100644 --- a/ql/src/test/results/clientpositive/vector_decimal_6.q.out +++ b/ql/src/test/results/clientpositive/vector_decimal_6.q.out @@ -129,7 +129,7 @@ STAGE PLANS: Statistics: Num rows: 27 Data size: 2572 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:decimal(10,5), 1:value:int, 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:key:decimal(10,5)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] Select Operator expressions: key (type: decimal(10,5)), value (type: int) outputColumnNames: _col0, _col1 @@ -151,8 +151,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -160,7 +160,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: key:decimal(10,5), value:int + dataColumns: key:decimal(10,5)/DECIMAL_64, value:int partitionColumnCount: 0 scratchColumnTypeNames: [] Reduce Vectorization: @@ -244,7 +244,7 @@ STAGE PLANS: Statistics: Num rows: 27 Data size: 3020 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:decimal(17,4), 1:value:int, 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:key:decimal(17,4)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] Select Operator expressions: key (type: decimal(17,4)), value (type: int) outputColumnNames: _col0, _col1 @@ -266,8 +266,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -275,7 +275,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: key:decimal(17,4), value:int + dataColumns: key:decimal(17,4)/DECIMAL_64, value:int partitionColumnCount: 0 scratchColumnTypeNames: [] Reduce Vectorization: @@ -511,7 +511,7 @@ STAGE PLANS: Statistics: Num rows: 27 Data size: 2572 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:decimal(10,5), 1:value:int, 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:key:decimal(10,5)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] Select Operator expressions: (key + 5.5) (type: decimal(11,5)), (value * 11) (type: int) outputColumnNames: _col0, _col1 @@ -519,7 +519,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [3, 4] - selectExpressions: DecimalColAddDecimalScalar(col 0:decimal(10,5), val 5.5) -> 3:decimal(11,5), LongColMultiplyLongScalar(col 1:int, val 11) -> 4:int + selectExpressions: Decimal64ColAddDecimal64Scalar(col 0:decimal(10,5)/DECIMAL_64, decimal64Val 550000, decimalVal 5.5) -> 3:decimal(11,5)/DECIMAL_64, LongColMultiplyLongScalar(col 1:int, val 11) -> 4:int Statistics: Num rows: 27 Data size: 2572 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col1 (type: int) @@ -535,8 +535,8 @@ STAGE PLANS: Map 
Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -544,9 +544,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: key:decimal(10,5), value:int + dataColumns: key:decimal(10,5)/DECIMAL_64, value:int partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(11,5), bigint] + scratchColumnTypeNames: [decimal(11,5)/DECIMAL_64, bigint] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true diff --git a/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out b/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out index 04c534e..b9f4444 100644 --- a/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out +++ b/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out @@ -103,8 +103,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -245,8 +245,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_decimal_cast.q.out b/ql/src/test/results/clientpositive/vector_decimal_cast.q.out index 0850648..1c9ca38 100644 --- a/ql/src/test/results/clientpositive/vector_decimal_cast.q.out +++ b/ql/src/test/results/clientpositive/vector_decimal_cast.q.out @@ -56,8 +56,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out b/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out index c296c30..674d3f7 100644 --- a/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out +++ b/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out @@ -86,8 +86,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -199,12 +199,12 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 2127808 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:cdouble:double, 1:cdecimal1:decimal(10,3), 2:cdecimal2:decimal(7,2), 
3:ROW__ID:struct] + vectorizationSchemaColumns: [0:cdouble:double, 1:cdecimal1:decimal(10,3)/DECIMAL_64, 2:cdecimal2:decimal(7,2)/DECIMAL_64, 3:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterExprAndExpr(children: FilterDecimalColGreaterDecimalScalar(col 1:decimal(10,3), val 0), FilterDecimalColLessDecimalScalar(col 1:decimal(10,3), val 12345.5678), FilterDecimalColNotEqualDecimalScalar(col 2:decimal(7,2), val 0), FilterDecimalColGreaterDecimalScalar(col 2:decimal(7,2), val 1000), SelectColumnIsNotNull(col 0:double)) + predicateExpression: FilterExprAndExpr(children: FilterDecimal64ColGreaterDecimal64Scalar(col 1:decimal(10,3)/DECIMAL_64, val 0), FilterDecimalColLessDecimalScalar(col 4:decimal(10,3), val 12345.5678)(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 4:decimal(10,3)), FilterDecimal64ColNotEqualDecimal64Scalar(col 2:decimal(7,2)/DECIMAL_64, val 0), FilterDecimal64ColGreaterDecimal64Scalar(col 2:decimal(7,2)/DECIMAL_64, val 100000), SelectColumnIsNotNull(col 0:double)) predicate: ((cdecimal1 < 12345.5678) and (cdecimal1 > 0) and (cdecimal2 <> 0) and (cdecimal2 > 1000) and cdouble is not null) (type: boolean) Statistics: Num rows: 455 Data size: 78788 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -213,8 +213,8 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [4, 6, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20] - selectExpressions: DecimalColAddDecimalColumn(col 1:decimal(10,3), col 2:decimal(7,2)) -> 4:decimal(11,3), DecimalColSubtractDecimalColumn(col 1:decimal(10,3), col 5:decimal(9,2))(children: DecimalScalarMultiplyDecimalColumn(val 2, col 2:decimal(7,2)) -> 5:decimal(9,2)) -> 6:decimal(11,3), DecimalColDivideDecimalColumn(col 7:decimal(11,3), col 2:decimal(7,2))(children: DecimalColAddDecimalScalar(col 1:decimal(10,3), val 2.34) -> 7:decimal(11,3)) -> 8:decimal(21,11), DecimalColMultiplyDecimalColumn(col 1:decimal(10,3), col 9:decimal(12,6))(children: DecimalColDivideDecimalScalar(col 2:decimal(7,2), val 3.4) -> 9:decimal(12,6)) -> 10:decimal(23,9), DecimalColModuloDecimalScalar(col 1:decimal(10,3), val 10) -> 11:decimal(5,3), CastDecimalToLong(col 1:decimal(10,3)) -> 12:int, CastDecimalToLong(col 2:decimal(7,2)) -> 13:smallint, CastDecimalToLong(col 2:decimal(7,2)) -> 14:tinyint, CastDecimalToLong(col 1:decimal(10,3)) -> 15:bigint, CastDecimalToBoolean(col 1:decimal(10,3)) -> 16:boolean, CastDecimalToDouble(col 2:decimal(7,2)) -> 17:double, CastDecimalToFloat(col 1:decimal(10,3)) -> 18:float, CastDecimalToString(col 2:decimal(7,2)) -> 19:string, CastDecimalToTimestamp(col 1:decimal(10,3)) -> 20:timestamp + projectedOutputColumnNums: [6, 8, 10, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22] + selectExpressions: DecimalColAddDecimalColumn(col 4:decimal(10,3), col 5:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 4:decimal(10,3), ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 5:decimal(7,2)) -> 6:decimal(11,3), DecimalColSubtractDecimalColumn(col 4:decimal(10,3), col 7:decimal(9,2))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 4:decimal(10,3), DecimalScalarMultiplyDecimalColumn(val 2, col 5:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 5:decimal(7,2)) -> 7:decimal(9,2)) -> 8:decimal(11,3), DecimalColDivideDecimalColumn(col 23:decimal(11,3), col 5:decimal(7,2))(children: 
ConvertDecimal64ToDecimal(col 9:decimal(11,3)/DECIMAL_64)(children: Decimal64ColAddDecimal64Scalar(col 1:decimal(10,3)/DECIMAL_64, decimal64Val 2340, decimalVal 2.34) -> 9:decimal(11,3)/DECIMAL_64) -> 23:decimal(11,3), ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 5:decimal(7,2)) -> 10:decimal(21,11), DecimalColMultiplyDecimalColumn(col 4:decimal(10,3), col 11:decimal(12,6))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 4:decimal(10,3), DecimalColDivideDecimalScalar(col 5:decimal(7,2), val 3.4)(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 5:decimal(7,2)) -> 11:decimal(12,6)) -> 12:decimal(23,9), DecimalColModuloDecimalScalar(col 4:decimal(10,3), val 10)(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 4:decimal(10,3)) -> 13:decimal(5,3), CastDecimalToLong(col 4:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 4:decimal(10,3)) -> 14:int, CastDecimalToLong(col 5:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 5:decimal(7,2)) -> 15:smallint, CastDecimalToLong(col 5:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 5:decimal(7,2)) -> 16:tinyint, CastDecimalToLong(col 4:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 4:decimal(10,3)) -> 17:bigint, CastDecimalToBoolean(col 4:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 4:decimal(10,3)) -> 18:boolean, CastDecimalToDouble(col 5:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 5:decimal(7,2)) -> 19:double, CastDecimalToFloat(col 4:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 4:decimal(10,3)) -> 20:float, CastDecimalToString(col 5:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 5:decimal(7,2)) -> 21:string, CastDecimalToTimestamp(col 4:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 4:decimal(10,3)) -> 22:timestamp Statistics: Num rows: 455 Data size: 78788 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: decimal(11,3)), _col1 (type: decimal(11,3)), _col2 (type: decimal(21,11)), _col3 (type: decimal(23,9)), _col4 (type: decimal(5,3)), _col5 (type: int), _col6 (type: smallint), _col7 (type: tinyint), _col8 (type: bigint), _col9 (type: boolean), _col10 (type: double), _col11 (type: float), _col12 (type: string), _col13 (type: timestamp) @@ -230,8 +230,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -239,9 +239,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 3 includeColumns: [0, 1, 2] - dataColumns: cdouble:double, cdecimal1:decimal(10,3), cdecimal2:decimal(7,2) + dataColumns: cdouble:double, cdecimal1:decimal(10,3)/DECIMAL_64, cdecimal2:decimal(7,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(11,3), decimal(9,2), decimal(11,3), decimal(11,3), decimal(21,11), decimal(12,6), decimal(23,9), decimal(5,3), bigint, bigint, bigint, bigint, bigint, double, double, string, timestamp] + scratchColumnTypeNames: [decimal(10,3), decimal(7,2), 
decimal(11,3), decimal(9,2), decimal(11,3), decimal(11,3)/DECIMAL_64, decimal(21,11), decimal(12,6), decimal(23,9), decimal(5,3), bigint, bigint, bigint, bigint, bigint, double, double, string, timestamp, decimal(11,3)] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -302,4 +302,4 @@ ORDER BY c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14) q POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_test_small_n0 #### A masked pattern was here #### -774841630076 +1273824888155 diff --git a/ql/src/test/results/clientpositive/vector_decimal_mapjoin.q.out b/ql/src/test/results/clientpositive/vector_decimal_mapjoin.q.out index 36e8810..bf3cf93 100644 --- a/ql/src/test/results/clientpositive/vector_decimal_mapjoin.q.out +++ b/ql/src/test/results/clientpositive/vector_decimal_mapjoin.q.out @@ -167,8 +167,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -304,6 +304,17 @@ POSTHOOK: Input: default@t2_n29 9.00 9 9.00 9 9.00 9 +PREHOOK: query: select count(*) from (select t1_n48.`dec`, t2_n29.`dec` from t1_n48 join t2_n29 on (t1_n48.`dec`=t2_n29.`dec`)) as t +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_n48 +PREHOOK: Input: default@t2_n29 +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from (select t1_n48.`dec`, t2_n29.`dec` from t1_n48 join t2_n29 on (t1_n48.`dec`=t2_n29.`dec`)) as t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_n48 +POSTHOOK: Input: default@t2_n29 +#### A masked pattern was here #### +106 PREHOOK: query: explain vectorization detail select t1_n48.`dec`, t1_n48.value_dec, t2_n29.`dec`, t2_n29.value_dec from t1_n48 join t2_n29 on (t1_n48.`dec`=t2_n29.`dec`) PREHOOK: type: QUERY @@ -397,8 +408,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -534,6 +545,17 @@ POSTHOOK: Input: default@t2_n29 9.00 48.96 9 5 9.00 48.96 9 7 9.00 48.96 9 7 +PREHOOK: query: select count(*) from (select t1_n48.`dec`, t1_n48.value_dec, t2_n29.`dec`, t2_n29.value_dec from t1_n48 join t2_n29 on (t1_n48.`dec`=t2_n29.`dec`)) as t +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_n48 +PREHOOK: Input: default@t2_n29 +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from (select t1_n48.`dec`, t1_n48.value_dec, t2_n29.`dec`, t2_n29.value_dec from t1_n48 join t2_n29 on (t1_n48.`dec`=t2_n29.`dec`)) as t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_n48 +POSTHOOK: Input: default@t2_n29 +#### A masked pattern was here #### +106 PREHOOK: query: CREATE TABLE over1k_small(t tinyint, si smallint, i int, @@ -735,6 +757,17 @@ POSTHOOK: Input: default@t1_small POSTHOOK: Input: default@t2_small #### A masked pattern was here #### 89.00 89 +PREHOOK: query: select count(*) from (select t1_small.`dec`, t2_small.`dec` from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`)) as t +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_small +PREHOOK: Input: 
default@t2_small +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from (select t1_small.`dec`, t2_small.`dec` from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`)) as t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_small +POSTHOOK: Input: default@t2_small +#### A masked pattern was here #### +1 PREHOOK: query: explain vectorization detail select t1_small.`dec`, t1_small.value_dec, t2_small.`dec`, t2_small.value_dec from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`) PREHOOK: type: QUERY @@ -860,6 +893,17 @@ POSTHOOK: Input: default@t1_small POSTHOOK: Input: default@t2_small #### A masked pattern was here #### 89.00 15.09 89 15 +PREHOOK: query: select count(*) from (select t1_small.`dec`, t1_small.value_dec, t2_small.`dec`, t2_small.value_dec from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`)) as t +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_small +PREHOOK: Input: default@t2_small +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from (select t1_small.`dec`, t1_small.value_dec, t2_small.`dec`, t2_small.value_dec from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`)) as t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_small +POSTHOOK: Input: default@t2_small +#### A masked pattern was here #### +1 PREHOOK: query: explain vectorization detail select t1_small.`dec`, t2_small.`dec` from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`) PREHOOK: type: QUERY @@ -986,6 +1030,17 @@ POSTHOOK: Input: default@t1_small POSTHOOK: Input: default@t2_small #### A masked pattern was here #### 89.00 89 +PREHOOK: query: select count(*) from (select t1_small.`dec`, t2_small.`dec` from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`)) as t +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_small +PREHOOK: Input: default@t2_small +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from (select t1_small.`dec`, t2_small.`dec` from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`)) as t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_small +POSTHOOK: Input: default@t2_small +#### A masked pattern was here #### +1 PREHOOK: query: explain vectorization detail select t1_small.`dec`, t1_small.value_dec, t2_small.`dec`, t2_small.value_dec from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`) PREHOOK: type: QUERY @@ -1112,3 +1167,14 @@ POSTHOOK: Input: default@t1_small POSTHOOK: Input: default@t2_small #### A masked pattern was here #### 89.00 15.09 89 15 +PREHOOK: query: select count(*) from (select t1_small.`dec`, t1_small.value_dec, t2_small.`dec`, t2_small.value_dec from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`)) as t +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_small +PREHOOK: Input: default@t2_small +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from (select t1_small.`dec`, t1_small.value_dec, t2_small.`dec`, t2_small.value_dec from t1_small join t2_small on (t1_small.`dec`=t2_small.`dec`)) as t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_small +POSTHOOK: Input: default@t2_small +#### A masked pattern was here #### +1 diff --git a/ql/src/test/results/clientpositive/vector_decimal_math_funcs.q.out b/ql/src/test/results/clientpositive/vector_decimal_math_funcs.q.out index 87596e7..417df95 100644 --- a/ql/src/test/results/clientpositive/vector_decimal_math_funcs.q.out +++ b/ql/src/test/results/clientpositive/vector_decimal_math_funcs.q.out @@ -146,8 +146,8 @@ STAGE PLANS: Map Vectorization: enabled: true 
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -357,12 +357,12 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 2201192 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:cbigint:bigint, 1:cdouble:double, 2:cdecimal1:decimal(12,4), 3:cdecimal2:decimal(14,8), 4:ROW__ID:struct] + vectorizationSchemaColumns: [0:cbigint:bigint, 1:cdouble:double, 2:cdecimal1:decimal(12,4)/DECIMAL_64, 3:cdecimal2:decimal(14,8)/DECIMAL_64, 4:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterExprAndExpr(children: FilterLongColEqualLongScalar(col 5:bigint, val 0)(children: LongColModuloLongScalar(col 0:bigint, val 500) -> 5:bigint), FilterDoubleColGreaterEqualDoubleScalar(col 7:double, val -1.0)(children: FuncSinDoubleToDouble(col 6:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 6:double) -> 7:double)) + predicateExpression: FilterExprAndExpr(children: FilterLongColEqualLongScalar(col 5:bigint, val 0)(children: LongColModuloLongScalar(col 0:bigint, val 500) -> 5:bigint), FilterDoubleColGreaterEqualDoubleScalar(col 8:double, val -1.0)(children: FuncSinDoubleToDouble(col 7:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 7:double) -> 8:double)) predicate: (((cbigint % 500) = 0) and (sin(cdecimal1) >= -1.0D)) (type: boolean) Statistics: Num rows: 2048 Data size: 366865 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -371,8 +371,8 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [2, 8, 9, 10, 11, 6, 12, 13, 14, 16, 17, 7, 18, 20, 21, 22, 23, 24, 25, 26, 27, 28, 2, 29, 5, 30] - selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(12,4), decimalPlaces 2) -> 8:decimal(11,2), FuncRoundDecimalToDecimal(col 2:decimal(12,4)) -> 9:decimal(9,0), FuncFloorDecimalToDecimal(col 2:decimal(12,4)) -> 10:decimal(9,0), FuncCeilDecimalToDecimal(col 2:decimal(12,4)) -> 11:decimal(9,0), RoundWithNumDigitsDoubleToDouble(col 7, decimalPlaces 58)(children: FuncExpDoubleToDouble(col 6:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 6:double) -> 7:double) -> 6:double, FuncLnDoubleToDouble(col 7:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 7:double) -> 12:double, FuncLog10DoubleToDouble(col 7:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 7:double) -> 13:double, FuncLog2DoubleToDouble(col 7:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 7:double) -> 14:double, FuncLog2DoubleToDouble(col 7:double)(children: CastDecimalToDouble(col 15:decimal(13,4))(children: DecimalColSubtractDecimalScalar(col 2:decimal(12,4), val 15601) -> 15:decimal(13,4)) -> 7:double) -> 16:double, FuncLogWithBaseDoubleToDouble(col 7:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 7:double) -> 17:double, FuncPowerDoubleToDouble(col 18:double)(children: FuncLog2DoubleToDouble(col 7:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 7:double) -> 18:double) -> 7:double, FuncPowerDoubleToDouble(col 19:double)(children: FuncLog2DoubleToDouble(col 18:double)(children: 
CastDecimalToDouble(col 2:decimal(12,4)) -> 18:double) -> 19:double) -> 18:double, FuncSqrtDoubleToDouble(col 19:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 19:double) -> 20:double, FuncAbsDecimalToDecimal(col 2:decimal(12,4)) -> 21:decimal(12,4), FuncSinDoubleToDouble(col 19:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 19:double) -> 22:double, FuncASinDoubleToDouble(col 19:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 19:double) -> 23:double, FuncCosDoubleToDouble(col 19:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 19:double) -> 24:double, FuncACosDoubleToDouble(col 19:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 19:double) -> 25:double, FuncATanDoubleToDouble(col 19:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 19:double) -> 26:double, FuncDegreesDoubleToDouble(col 19:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 19:double) -> 27:double, FuncRadiansDoubleToDouble(col 19:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 19:double) -> 28:double, FuncNegateDecimalToDecimal(col 2:decimal(12,4)) -> 29:decimal(12,4), FuncSignDecimalToLong(col 2:decimal(12,4)) -> 5:int, FuncCosDoubleToDouble(col 19:double)(children: DoubleColAddDoubleScalar(col 30:double, val 3.14159)(children: DoubleColUnaryMinus(col 19:double)(children: FuncSinDoubleToDouble(col 30:double)(children: FuncLnDoubleToDouble(col 19:double)(children: CastDecimalToDouble(col 2:decimal(12,4)) -> 19:double) -> 30:double) -> 19:double) -> 30:double) -> 19:double) -> 30:double + projectedOutputColumnNums: [2, 9, 10, 11, 12, 7, 13, 14, 15, 17, 18, 8, 19, 21, 22, 23, 24, 25, 26, 27, 28, 29, 2, 30, 5, 31] + selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 6:decimal(12,4), decimalPlaces 2)(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 9:decimal(11,2), FuncRoundDecimalToDecimal(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 10:decimal(9,0), FuncFloorDecimalToDecimal(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 11:decimal(9,0), FuncCeilDecimalToDecimal(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 12:decimal(9,0), RoundWithNumDigitsDoubleToDouble(col 8, decimalPlaces 58)(children: FuncExpDoubleToDouble(col 7:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 7:double) -> 8:double) -> 7:double, FuncLnDoubleToDouble(col 8:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 8:double) -> 13:double, FuncLog10DoubleToDouble(col 8:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 8:double) -> 14:double, FuncLog2DoubleToDouble(col 8:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 8:double) -> 15:double, FuncLog2DoubleToDouble(col 8:double)(children: CastDecimalToDouble(col 32:decimal(13,4))(children: ConvertDecimal64ToDecimal(col 16:decimal(13,4)/DECIMAL_64)(children: Decimal64ColSubtractDecimal64Scalar(col 2:decimal(12,4)/DECIMAL_64, decimal64Val 156010000, decimalVal 15601) -> 
16:decimal(13,4)/DECIMAL_64) -> 32:decimal(13,4)) -> 8:double) -> 17:double, FuncLogWithBaseDoubleToDouble(col 8:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 8:double) -> 18:double, FuncPowerDoubleToDouble(col 19:double)(children: FuncLog2DoubleToDouble(col 8:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 8:double) -> 19:double) -> 8:double, FuncPowerDoubleToDouble(col 20:double)(children: FuncLog2DoubleToDouble(col 19:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 19:double) -> 20:double) -> 19:double, FuncSqrtDoubleToDouble(col 20:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 20:double) -> 21:double, FuncAbsDecimalToDecimal(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 22:decimal(12,4), FuncSinDoubleToDouble(col 20:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 20:double) -> 23:double, FuncASinDoubleToDouble(col 20:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 20:double) -> 24:double, FuncCosDoubleToDouble(col 20:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 20:double) -> 25:double, FuncACosDoubleToDouble(col 20:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 20:double) -> 26:double, FuncATanDoubleToDouble(col 20:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 20:double) -> 27:double, FuncDegreesDoubleToDouble(col 20:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 20:double) -> 28:double, FuncRadiansDoubleToDouble(col 20:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 20:double) -> 29:double, FuncNegateDecimalToDecimal(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 30:decimal(12,4), FuncSignDecimalToLong(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 5:int, FuncCosDoubleToDouble(col 20:double)(children: DoubleColAddDoubleScalar(col 31:double, val 3.14159)(children: DoubleColUnaryMinus(col 20:double)(children: FuncSinDoubleToDouble(col 31:double)(children: FuncLnDoubleToDouble(col 20:double)(children: CastDecimalToDouble(col 6:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 6:decimal(12,4)) -> 20:double) -> 31:double) -> 20:double) -> 31:double) -> 20:double) -> 31:double Statistics: Num rows: 2048 Data size: 366865 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -388,8 +388,8 @@ STAGE PLANS: Map Vectorization: 
enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -397,9 +397,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 4 includeColumns: [0, 2] - dataColumns: cbigint:bigint, cdouble:double, cdecimal1:decimal(12,4), cdecimal2:decimal(14,8) + dataColumns: cbigint:bigint, cdouble:double, cdecimal1:decimal(12,4)/DECIMAL_64, cdecimal2:decimal(14,8)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [bigint, double, double, decimal(11,2), decimal(9,0), decimal(9,0), decimal(9,0), double, double, double, decimal(13,4), double, double, double, double, double, decimal(12,4), double, double, double, double, double, double, double, decimal(12,4), double] + scratchColumnTypeNames: [bigint, decimal(12,4), double, double, decimal(11,2), decimal(9,0), decimal(9,0), decimal(9,0), double, double, double, decimal(13,4)/DECIMAL_64, double, double, double, double, double, decimal(12,4), double, double, double, double, double, double, double, decimal(12,4), double, decimal(13,4)] Stage: Stage-0 Fetch Operator diff --git a/ql/src/test/results/clientpositive/vector_decimal_precision.q.out b/ql/src/test/results/clientpositive/vector_decimal_precision.q.out index fd6d9c3..e3e354f 100644 --- a/ql/src/test/results/clientpositive/vector_decimal_precision.q.out +++ b/ql/src/test/results/clientpositive/vector_decimal_precision.q.out @@ -604,8 +604,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_decimal_round.q.out b/ql/src/test/results/clientpositive/vector_decimal_round.q.out index cdf0ba4..d690579 100644 --- a/ql/src/test/results/clientpositive/vector_decimal_round.q.out +++ b/ql/src/test/results/clientpositive/vector_decimal_round.q.out @@ -473,15 +473,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:dec:decimal(10,0), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:dec:decimal(10,0)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: dec (type: decimal(10,0)), round(dec, -1) (type: decimal(11,0)) outputColumnNames: _col0, _col1 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 2] - selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 0:decimal(10,0), decimalPlaces -1) -> 2:decimal(11,0) + projectedOutputColumnNums: [0, 3] + selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) -> 2:decimal(10,0)) -> 3:decimal(11,0) Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: decimal(10,0)) @@ -497,8 +497,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + 
inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -506,9 +506,9 @@ rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: dec:decimal(10,0) + dataColumns: dec:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(11,0)] + scratchColumnTypeNames: [decimal(10,0), decimal(11,0)] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -564,15 +564,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:dec:decimal(10,0), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:dec:decimal(10,0)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: dec (type: decimal(10,0)), round(dec, -1) (type: decimal(11,0)) outputColumnNames: _col0, _col2 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 2] - selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 0:decimal(10,0), decimalPlaces -1) -> 2:decimal(11,0) + projectedOutputColumnNums: [0, 3] + selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) -> 2:decimal(10,0)) -> 3:decimal(11,0) Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col2 (type: decimal(11,0)) @@ -588,8 +588,8 @@ Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -597,9 +597,9 @@ rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: dec:decimal(10,0) + dataColumns: dec:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(11,0)] + scratchColumnTypeNames: [decimal(10,0), decimal(11,0)] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true
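The two vector_decimal_round.q.out hunks above show the fallback path for functions that have no decimal64-native implementation: round() still runs as FuncRoundWithNumDigitsDecimalToDecimal, so the plan first rehydrates the DECIMAL_64 input through ConvertDecimal64ToDecimal into a scratch column, which is why scratchColumnTypeNames grows from [decimal(11,0)] to [decimal(10,0), decimal(11,0)]. A minimal sketch of that conversion, using plain Java types as stand-ins for Hive's Decimal64ColumnVector and DecimalColumnVector and assuming a non-null, non-repeating batch:

    import java.math.BigDecimal;

    // Conceptual model of ConvertDecimal64ToDecimal: turn the unscaled-long
    // form of a decimal(p,s) column back into full decimal values in a
    // scratch column so non-decimal64 expressions can consume them.
    final class ConvertDecimal64Sketch {
        static void convert(long[] decimal64Col, int scale, BigDecimal[] scratchCol, int size) {
            for (int i = 0; i < size; i++) {
                // unscaled long + scale -> exact decimal, e.g. 550000 at scale 5 -> 5.50000
                scratchCol[i] = BigDecimal.valueOf(decimal64Col[i], scale);
            }
        }
    }

The conversion is per-batch and cheap relative to arbitrary-precision arithmetic, which is why the planner keeps columns in DECIMAL_64 form and converts lazily only where an expression demands it.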
diff --git a/ql/src/test/results/clientpositive/vector_decimal_round_2.q.out b/ql/src/test/results/clientpositive/vector_decimal_round_2.q.out index 4140393..446fa37 100644 --- a/ql/src/test/results/clientpositive/vector_decimal_round_2.q.out +++ b/ql/src/test/results/clientpositive/vector_decimal_round_2.q.out @@ -81,8 +81,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -223,8 +223,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -392,8 +392,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -550,8 +550,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_decimal_trailing.q.out b/ql/src/test/results/clientpositive/vector_decimal_trailing.q.out index 284e71a..f25b0b2 100644 --- a/ql/src/test/results/clientpositive/vector_decimal_trailing.q.out +++ b/ql/src/test/results/clientpositive/vector_decimal_trailing.q.out @@ -88,7 +88,7 @@ STAGE PLANS: Statistics: Num rows: 30 Data size: 4712 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:id:int, 1:a:decimal(10,4), 2:b:decimal(15,8), 3:ROW__ID:struct] + vectorizationSchemaColumns: [0:id:int, 1:a:decimal(10,4)/DECIMAL_64, 2:b:decimal(15,8)/DECIMAL_64, 3:ROW__ID:struct] Select Operator expressions: id (type: int), a (type: decimal(10,4)), b (type: decimal(15,8)) outputColumnNames: _col0, _col1, _col2 @@ -111,8 +111,8 @@ Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -120,7 +120,7 @@ rowBatchContext: dataColumnCount: 3 includeColumns: [0, 1, 2] - dataColumns: id:int, a:decimal(10,4), b:decimal(15,8) + dataColumns: id:int, a:decimal(10,4)/DECIMAL_64, b:decimal(15,8)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [] Reduce Vectorization: diff --git a/ql/src/test/results/clientpositive/vector_decimal_udf2.q.out b/ql/src/test/results/clientpositive/vector_decimal_udf2.q.out index c1eddca..089ef99 100644 --- a/ql/src/test/results/clientpositive/vector_decimal_udf2.q.out +++ b/ql/src/test/results/clientpositive/vector_decimal_udf2.q.out @@ -83,12 +83,12 @@ STAGE PLANS: Statistics: Num rows: 39 Data size: 4072 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:decimal(14,5), 1:value:int, 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:key:decimal(14,5)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterDecimalColEqualDecimalScalar(col 0:decimal(14,5), val 10) + predicateExpression: FilterDecimal64ColEqualDecimal64Scalar(col 0:decimal(14,5)/DECIMAL_64, val 1000000) predicate: (key = 10) (type: boolean) Statistics: Num rows: 19 Data size: 1983 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -114,8 +114,8 @@ Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64]
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -123,7 +123,7 @@ rowBatchContext: dataColumnCount: 2 includeColumns: [0] - dataColumns: key:decimal(14,5), value:int + dataColumns: key:decimal(14,5)/DECIMAL_64, value:int partitionColumnCount: 0 scratchColumnTypeNames: [double, double, double, double, double, double, double] @@ -188,12 +188,12 @@ STAGE PLANS: Statistics: Num rows: 39 Data size: 4072 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:decimal(14,5), 1:value:int, 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:key:decimal(14,5)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterDecimalColEqualDecimalScalar(col 0:decimal(14,5), val 10) + predicateExpression: FilterDecimal64ColEqualDecimal64Scalar(col 0:decimal(14,5)/DECIMAL_64, val 1000000) predicate: (key = 10) (type: boolean) Statistics: Num rows: 19 Data size: 1983 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -219,8 +219,8 @@ Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -228,7 +228,7 @@ rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: key:decimal(14,5), value:int + dataColumns: key:decimal(14,5)/DECIMAL_64, value:int partitionColumnCount: 0 scratchColumnTypeNames: [double, double, double, double, double, double, double, double]
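In the vector_decimal_udf2.q.out hunks just above, the predicate key = 10 over a decimal(14,5)/DECIMAL_64 column becomes FilterDecimal64ColEqualDecimal64Scalar with val 1000000: the literal is pre-scaled at compile time into the column's unscaled-long space (10 * 10^5), the same way 5.5 at scale 5 became decimal64Val 550000 in vector_decimal_6.q.out earlier. At run time the filter is then a primitive long comparison per row. A hedged sketch of both steps; the method names are illustrative, not Hive's:

    import java.math.BigDecimal;
    import java.math.RoundingMode;

    final class Decimal64LiteralSketch {
        // Pre-scale a literal: decimal(14,5) literal 10 -> 1000000, as printed above.
        static long toDecimal64(BigDecimal literal, int scale) {
            // RoundingMode.UNNECESSARY: fail loudly rather than silently
            // rounding a literal that does not fit the column's scale.
            return literal.setScale(scale, RoundingMode.UNNECESSARY)
                    .unscaledValue().longValueExact();
        }

        // Keep only the rows whose unscaled value equals the pre-scaled literal.
        static int filterEquals(long[] col, int size, long scaledVal, int[] selected) {
            int newSize = 0;
            for (int i = 0; i < size; i++) {
                if (col[i] == scaledVal) {
                    selected[newSize++] = i;
                }
            }
            return newSize;
        }

        public static void main(String[] args) {
            System.out.println(toDecimal64(new BigDecimal("10"), 5));  // 1000000
            System.out.println(toDecimal64(new BigDecimal("5.5"), 5)); // 550000
        }
    }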
diff --git a/ql/src/test/results/clientpositive/vector_delete_orig_table.q.out b/ql/src/test/results/clientpositive/vector_delete_orig_table.q.out index 4ce897e..5d7f310 100644 --- a/ql/src/test/results/clientpositive/vector_delete_orig_table.q.out +++ b/ql/src/test/results/clientpositive/vector_delete_orig_table.q.out @@ -87,8 +87,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_distinct_2.q.out b/ql/src/test/results/clientpositive/vector_distinct_2.q.out index 41c61ce..8eefb3d 100644 --- a/ql/src/test/results/clientpositive/vector_distinct_2.q.out +++ b/ql/src/test/results/clientpositive/vector_distinct_2.q.out @@ -158,8 +158,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_elt.q.out b/ql/src/test/results/clientpositive/vector_elt.q.out index 1b3e856..5806ca7 100644 --- a/ql/src/test/results/clientpositive/vector_elt.q.out +++ b/ql/src/test/results/clientpositive/vector_elt.q.out @@ -59,8 +59,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -166,8 +166,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_empty_where.q.out b/ql/src/test/results/clientpositive/vector_empty_where.q.out index a77f55f..388b775 100644 --- a/ql/src/test/results/clientpositive/vector_empty_where.q.out +++ b/ql/src/test/results/clientpositive/vector_empty_where.q.out @@ -63,8 +63,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -206,8 +206,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -357,8 +357,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -508,8 +508,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_groupby4.q.out b/ql/src/test/results/clientpositive/vector_groupby4.q.out index 4822871..15b0427 100644 --- a/ql/src/test/results/clientpositive/vector_groupby4.q.out +++ b/ql/src/test/results/clientpositive/vector_groupby4.q.out @@ -70,8 +70,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_groupby6.q.out b/ql/src/test/results/clientpositive/vector_groupby6.q.out index 3353fdd..31472a1 100644 --- a/ql/src/test/results/clientpositive/vector_groupby6.q.out +++ b/ql/src/test/results/clientpositive/vector_groupby6.q.out @@ -70,8 +70,8 @@ STAGE PLANS: Map
Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_groupby_3.q.out b/ql/src/test/results/clientpositive/vector_groupby_3.q.out index dbdbf46..173f84f 100644 --- a/ql/src/test/results/clientpositive/vector_groupby_3.q.out +++ b/ql/src/test/results/clientpositive/vector_groupby_3.q.out @@ -161,8 +161,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_groupby_reduce.q.out b/ql/src/test/results/clientpositive/vector_groupby_reduce.q.out index 56f0de2..c18ab63 100644 --- a/ql/src/test/results/clientpositive/vector_groupby_reduce.q.out +++ b/ql/src/test/results/clientpositive/vector_groupby_reduce.q.out @@ -287,8 +287,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -478,8 +478,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -755,8 +755,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -953,8 +953,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false
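One pattern that explains which columns in these plans carry the /DECIMAL_64 tag: it appears only on decimals of precision 18 or less. A signed 64-bit long holds any 18-digit unscaled value, but 19-digit values can overflow it, so results such as decimal(11,5) in vector_decimal_6.q.out stay in DECIMAL_64 form while wider intermediates such as decimal(21,11) in vector_decimal_expressions.q.out fall back to ordinary decimal vectors. An illustrative check under that assumption (the 18-digit limit matches ORC's decimal64 bound; the class itself is not Hive code):

    final class Decimal64EligibilitySketch {
        // Long.MAX_VALUE = 9223372036854775807 has 19 digits, so only
        // precisions up to 18 are always representable as an unscaled long.
        static final int MAX_DECIMAL64_PRECISION = 18;

        static boolean fitsInDecimal64(int precision) {
            return precision <= MAX_DECIMAL64_PRECISION;
        }

        public static void main(String[] args) {
            System.out.println(fitsInDecimal64(11)); // decimal(11,5)  -> true
            System.out.println(fitsInDecimal64(21)); // decimal(21,11) -> false
        }
    }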
diff --git a/ql/src/test/results/clientpositive/vector_grouping_sets.q.out b/ql/src/test/results/clientpositive/vector_grouping_sets.q.out index cf0ec94..5113966 100644 --- a/ql/src/test/results/clientpositive/vector_grouping_sets.q.out +++ b/ql/src/test/results/clientpositive/vector_grouping_sets.q.out @@ -184,8 +184,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -295,8 +295,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_if_expr.q.out b/ql/src/test/results/clientpositive/vector_if_expr.q.out index c948ea8..20c47ed 100644 --- a/ql/src/test/results/clientpositive/vector_if_expr.q.out +++ b/ql/src/test/results/clientpositive/vector_if_expr.q.out @@ -51,8 +51,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_include_no_sel.q.out b/ql/src/test/results/clientpositive/vector_include_no_sel.q.out index dae2d63..0ecc7af 100644 --- a/ql/src/test/results/clientpositive/vector_include_no_sel.q.out +++ b/ql/src/test/results/clientpositive/vector_include_no_sel.q.out @@ -259,8 +259,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_interval_1.q.out b/ql/src/test/results/clientpositive/vector_interval_1.q.out index 70b7c66..6ee3154 100644 --- a/ql/src/test/results/clientpositive/vector_interval_1.q.out +++ b/ql/src/test/results/clientpositive/vector_interval_1.q.out @@ -106,8 +106,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -217,8 +217,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats:
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -610,8 +610,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -735,8 +735,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -842,8 +842,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -955,8 +955,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_interval_arithmetic.q.out b/ql/src/test/results/clientpositive/vector_interval_arithmetic.q.out index 1547942..2a390fa 100644 --- a/ql/src/test/results/clientpositive/vector_interval_arithmetic.q.out +++ b/ql/src/test/results/clientpositive/vector_interval_arithmetic.q.out @@ -100,8 +100,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -265,8 +265,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -430,8 +430,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -599,8 +599,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -698,8 +698,8 @@ STAGE PLANS: Map 
Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -865,8 +865,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1032,8 +1032,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1199,8 +1199,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_interval_mapjoin.q.out b/ql/src/test/results/clientpositive/vector_interval_mapjoin.q.out index 1654bd9..a3f4b29 100644 --- a/ql/src/test/results/clientpositive/vector_interval_mapjoin.q.out +++ b/ql/src/test/results/clientpositive/vector_interval_mapjoin.q.out @@ -274,8 +274,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_left_outer_join.q.out b/ql/src/test/results/clientpositive/vector_left_outer_join.q.out index f3ddcd0..9aa6531 100644 --- a/ql/src/test/results/clientpositive/vector_left_outer_join.q.out +++ b/ql/src/test/results/clientpositive/vector_left_outer_join.q.out @@ -99,8 +99,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_left_outer_join2.q.out b/ql/src/test/results/clientpositive/vector_left_outer_join2.q.out index 3b9bbf4..7f2f17f 100644 --- a/ql/src/test/results/clientpositive/vector_left_outer_join2.q.out +++ b/ql/src/test/results/clientpositive/vector_left_outer_join2.q.out @@ -353,8 +353,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -476,8 +476,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -599,8 +599,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -722,8 +722,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_multi_insert.q.out b/ql/src/test/results/clientpositive/vector_multi_insert.q.out index 9fc92dd..65e1034 100644 --- a/ql/src/test/results/clientpositive/vector_multi_insert.q.out +++ b/ql/src/test/results/clientpositive/vector_multi_insert.q.out @@ -159,8 +159,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_non_string_partition.q.out b/ql/src/test/results/clientpositive/vector_non_string_partition.q.out index 3e4f7b9..218b562 100644 --- a/ql/src/test/results/clientpositive/vector_non_string_partition.q.out +++ b/ql/src/test/results/clientpositive/vector_non_string_partition.q.out @@ -78,8 +78,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -180,8 +180,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_null_projection.q.out b/ql/src/test/results/clientpositive/vector_null_projection.q.out index 86f96f9..4c88b00 100644 --- a/ql/src/test/results/clientpositive/vector_null_projection.q.out +++ b/ql/src/test/results/clientpositive/vector_null_projection.q.out @@ -79,8 +79,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: 
[] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_nvl.q.out b/ql/src/test/results/clientpositive/vector_nvl.q.out index c6eb842..7a6623e 100644 --- a/ql/src/test/results/clientpositive/vector_nvl.q.out +++ b/ql/src/test/results/clientpositive/vector_nvl.q.out @@ -61,8 +61,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -152,8 +152,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -241,8 +241,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -330,8 +330,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_orderby_5.q.out b/ql/src/test/results/clientpositive/vector_orderby_5.q.out index 324bdd0..734c6a9 100644 --- a/ql/src/test/results/clientpositive/vector_orderby_5.q.out +++ b/ql/src/test/results/clientpositive/vector_orderby_5.q.out @@ -162,8 +162,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_outer_join0.q.out b/ql/src/test/results/clientpositive/vector_outer_join0.q.out index 955c39a..9bb2fad 100644 --- a/ql/src/test/results/clientpositive/vector_outer_join0.q.out +++ b/ql/src/test/results/clientpositive/vector_outer_join0.q.out @@ -144,8 +144,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -263,8 +263,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - 
featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_outer_join1.q.out b/ql/src/test/results/clientpositive/vector_outer_join1.q.out index 5096b51..ece32f6 100644 --- a/ql/src/test/results/clientpositive/vector_outer_join1.q.out +++ b/ql/src/test/results/clientpositive/vector_outer_join1.q.out @@ -304,8 +304,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -446,8 +446,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -721,8 +721,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_outer_join2.q.out b/ql/src/test/results/clientpositive/vector_outer_join2.q.out index ef6ef3a..455d09e 100644 --- a/ql/src/test/results/clientpositive/vector_outer_join2.q.out +++ b/ql/src/test/results/clientpositive/vector_outer_join2.q.out @@ -361,8 +361,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_outer_join3.q.out b/ql/src/test/results/clientpositive/vector_outer_join3.q.out index 74d774b..07a2c33 100644 --- a/ql/src/test/results/clientpositive/vector_outer_join3.q.out +++ b/ql/src/test/results/clientpositive/vector_outer_join3.q.out @@ -244,7 +244,7 @@ left outer join small_alltypesorc_a_n1 hd on hd.cstring1 = c.cstring1 ) t1 POSTHOOK: type: QUERY -{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cint"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cint (type: 
int)","columnExprMap:":{"_col0":"cint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: int)","1":"_col0 (type: int)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"cstring1 (type: string)","columnExprMap:":{"_col0":"cstring1"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["cint","cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"cint (type: int), cstring1 (type: string)","columnExprMap:":{"_col0":"cint","_col1":"cstring1"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[2, 6]"},"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col1":"0:_col1"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: int)","1":"_col0 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 2:int"],"bigTableValueExpressions:":["col 6:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col1"],"Statistics:":"Num rows: 22 Data size: 4840 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 24 Data size: 5324 Basic 
stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[2, 6]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} +{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cint"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: 
NONE","table:":"small_alltypesorc_a_n1","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cint (type: int)","columnExprMap:":{"_col0":"cint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: int)","1":"_col0 (type: int)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"cstring1 (type: string)","columnExprMap:":{"_col0":"cstring1"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["cint","cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"cint (type: int), cstring1 (type: string)","columnExprMap:":{"_col0":"cint","_col1":"cstring1"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[2, 6]"},"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col1":"0:_col1"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: int)","1":"_col0 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 2:int"],"bigTableValueExpressions:":["col 6:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col1"],"Statistics:":"Num rows: 22 Data size: 4840 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types 
IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 24 Data size: 5324 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[DECIMAL_64]","featureSupportInUse:":"[DECIMAL_64]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[2, 6]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} PREHOOK: query: select count(*) from (select c.cstring1 from small_alltypesorc_a_n1 c left outer join small_alltypesorc_a_n1 cd @@ -284,7 +284,7 @@ left outer join small_alltypesorc_a_n1 hd on hd.cstring1 = c.cstring1 ) t1 POSTHOOK: type: QUERY -{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local 
Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cstring2 (type: string)","columnExprMap:":{"_col0":"cstring2"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"cstring1 (type: string)","columnExprMap:":{"_col0":"cstring1"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: string)","1":"_col0 (type: string)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["cstring1","cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"cstring1 (type: string), cstring2 (type: string)","columnExprMap:":{"_col0":"cstring1","_col1":"cstring2"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[6, 7]"},"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 7:string"],"bigTableValueExpressions:":["col 6:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 22 Data size: 4840 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: string)","1":"_col0 (type: string)"},"Map Join 
Vectorization:":{"bigTableKeyExpressions:":["col 0:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 24 Data size: 5324 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[6, 7]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} +{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-8":{"ROOT 
STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cstring2 (type: string)","columnExprMap:":{"_col0":"cstring2"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"cstring1 (type: string)","columnExprMap:":{"_col0":"cstring1"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: string)","1":"_col0 (type: string)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["cstring1","cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"cstring1 (type: string), cstring2 (type: string)","columnExprMap:":{"_col0":"cstring1","_col1":"cstring2"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[6, 7]"},"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 7:string"],"bigTableValueExpressions:":["col 6:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 22 Data size: 4840 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join 
Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: string)","1":"_col0 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 24 Data size: 5324 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[DECIMAL_64]","featureSupportInUse:":"[DECIMAL_64]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[6, 7]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} PREHOOK: 
query: select count(*) from (select c.cstring1 from small_alltypesorc_a_n1 c left outer join small_alltypesorc_a_n1 cd @@ -324,7 +324,7 @@ left outer join small_alltypesorc_a_n1 hd on hd.cstring1 = c.cstring1 and hd.cint = c.cint ) t1 POSTHOOK: type: QUERY -{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cbigint","cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cbigint (type: bigint), cstring2 (type: string)","columnExprMap:":{"_col0":"cbigint","_col1":"cstring2"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: bigint), _col3 (type: string)","1":"_col0 (type: bigint), _col1 (type: string)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["cint","cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"cint (type: int), cstring1 (type: string)","columnExprMap:":{"_col0":"cint","_col1":"cstring1"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: int), _col2 (type: string)","1":"_col0 (type: int), _col1 (type: string)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["cint","cbigint","cstring1","cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"cint (type: int), cbigint (type: bigint), cstring1 (type: string), cstring2 (type: string)","columnExprMap:":{"_col0":"cint","_col1":"cbigint","_col2":"cstring1","_col3":"cstring2"},"outputColumnNames:":["_col0","_col1","_col2","_col3"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[2, 3, 6, 7]"},"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col2":"0:_col2"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: bigint), _col3 (type: 
string)","1":"_col0 (type: bigint), _col1 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 3:bigint","col 7:string"],"bigTableValueExpressions:":["col 2:int","col 6:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col2"],"Statistics:":"Num rows: 22 Data size: 4840 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: int), _col2 (type: string)","1":"_col0 (type: int), _col1 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:int","col 1:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 24 Data size: 5324 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[2, 3, 6, 7]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled 
IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} +{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cbigint","cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cbigint (type: bigint), cstring2 (type: string)","columnExprMap:":{"_col0":"cbigint","_col1":"cstring2"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: bigint), _col3 (type: string)","1":"_col0 (type: bigint), _col1 (type: string)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["cint","cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"cint (type: int), cstring1 (type: string)","columnExprMap:":{"_col0":"cint","_col1":"cstring1"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: int), _col2 (type: string)","1":"_col0 (type: int), _col1 (type: string)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["cint","cbigint","cstring1","cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"cint (type: int), cbigint (type: bigint), cstring1 (type: string), cstring2 (type: 
string)","columnExprMap:":{"_col0":"cint","_col1":"cbigint","_col2":"cstring1","_col3":"cstring2"},"outputColumnNames:":["_col0","_col1","_col2","_col3"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[2, 3, 6, 7]"},"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col2":"0:_col2"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: bigint), _col3 (type: string)","1":"_col0 (type: bigint), _col1 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 3:bigint","col 7:string"],"bigTableValueExpressions:":["col 2:int","col 6:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col2"],"Statistics:":"Num rows: 22 Data size: 4840 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: int), _col2 (type: string)","1":"_col0 (type: int), _col1 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:int","col 1:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 24 Data size: 5324 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS 
true"],"inputFormatFeatureSupport:":"[DECIMAL_64]","featureSupportInUse:":"[DECIMAL_64]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[2, 3, 6, 7]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} PREHOOK: query: select count(*) from (select c.cstring1 from small_alltypesorc_a_n1 c left outer join small_alltypesorc_a_n1 cd diff --git a/ql/src/test/results/clientpositive/vector_outer_join4.q.out b/ql/src/test/results/clientpositive/vector_outer_join4.q.out index f26cfee..a96507d 100644 --- a/ql/src/test/results/clientpositive/vector_outer_join4.q.out +++ b/ql/src/test/results/clientpositive/vector_outer_join4.q.out @@ -258,7 +258,7 @@ from small_alltypesorc_b c left outer join small_alltypesorc_b cd on cd.cint = c.cint POSTHOOK: type: QUERY -{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-4":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-4"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-4":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["ctinyint","csmallint","cint","cbigint","cfloat","cdouble","cstring1","cstring2","ctimestamp1","ctimestamp2","cboolean1","cboolean2"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cfloat (type: float), cdouble (type: double), cstring1 (type: string), cstring2 (type: string), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), cboolean1 (type: boolean), cboolean2 (type: 
boolean)","columnExprMap:":{"_col0":"ctinyint","_col1":"csmallint","_col10":"cboolean1","_col11":"cboolean2","_col2":"cint","_col3":"cbigint","_col4":"cfloat","_col5":"cdouble","_col6":"cstring1","_col7":"cstring2","_col8":"ctimestamp1","_col9":"ctimestamp2"},"outputColumnNames:":["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11"],"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col2 (type: int)","1":"_col2 (type: int)"},"OperatorId:":"HASHTABLESINK_10"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["ctinyint","csmallint","cint","cbigint","cfloat","cdouble","cstring1","cstring2","ctimestamp1","ctimestamp2","cboolean1","cboolean2"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cfloat (type: float), cdouble (type: double), cstring1 (type: string), cstring2 (type: string), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), cboolean1 (type: boolean), cboolean2 (type: boolean)","columnExprMap:":{"_col0":"ctinyint","_col1":"csmallint","_col10":"cboolean1","_col11":"cboolean2","_col2":"cint","_col3":"cbigint","_col4":"cfloat","_col5":"cdouble","_col6":"cstring1","_col7":"cstring2","_col8":"ctimestamp1","_col9":"ctimestamp2"},"outputColumnNames:":["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]"},"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_12","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col1":"0:_col1","_col10":"0:_col10","_col11":"0:_col11","_col12":"1:_col0","_col13":"1:_col1","_col14":"1:_col2","_col15":"1:_col3","_col16":"1:_col4","_col17":"1:_col5","_col18":"1:_col6","_col19":"1:_col7","_col2":"0:_col2","_col20":"1:_col8","_col21":"1:_col9","_col22":"1:_col10","_col23":"1:_col11","_col3":"0:_col3","_col4":"0:_col4","_col5":"0:_col5","_col6":"0:_col6","_col7":"0:_col7","_col8":"0:_col8","_col9":"0:_col9"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col2 (type: int)","1":"_col2 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 2:int"],"bigTableValueExpressions:":["col 0:tinyint","col 1:smallint","col 2:int","col 3:bigint","col 4:float","col 5:double","col 6:string","col 7:string","col 8:timestamp","col 9:timestamp","col 10:boolean","col 11:boolean"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized 
Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col20","_col21","_col22","_col23"],"Statistics:":"Num rows: 33 Data size: 7348 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_13","children":{"File Output Operator":{"compressed:":"false","File Sink Vectorization:":{"className:":"VectorFileSinkOperator","native:":"false"},"Statistics:":"Num rows: 33 Data size: 7348 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_14"}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[bigint, bigint, bigint, bigint, double, double, string, string, timestamp, timestamp, bigint, bigint]"}},"Local Work:":{"Map Reduce Local Work":{}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_15"}}}}}} +{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-4":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-4"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-4":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["ctinyint","csmallint","cint","cbigint","cfloat","cdouble","cstring1","cstring2","ctimestamp1","ctimestamp2","cboolean1","cboolean2"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cfloat (type: float), cdouble (type: double), cstring1 (type: string), cstring2 (type: string), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), cboolean1 (type: boolean), cboolean2 (type: boolean)","columnExprMap:":{"_col0":"ctinyint","_col1":"csmallint","_col10":"cboolean1","_col11":"cboolean2","_col2":"cint","_col3":"cbigint","_col4":"cfloat","_col5":"cdouble","_col6":"cstring1","_col7":"cstring2","_col8":"ctimestamp1","_col9":"ctimestamp2"},"outputColumnNames:":["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11"],"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: 
NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col2 (type: int)","1":"_col2 (type: int)"},"OperatorId:":"HASHTABLESINK_10"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["ctinyint","csmallint","cint","cbigint","cfloat","cdouble","cstring1","cstring2","ctimestamp1","ctimestamp2","cboolean1","cboolean2"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cfloat (type: float), cdouble (type: double), cstring1 (type: string), cstring2 (type: string), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), cboolean1 (type: boolean), cboolean2 (type: boolean)","columnExprMap:":{"_col0":"ctinyint","_col1":"csmallint","_col10":"cboolean1","_col11":"cboolean2","_col2":"cint","_col3":"cbigint","_col4":"cfloat","_col5":"cdouble","_col6":"cstring1","_col7":"cstring2","_col8":"ctimestamp1","_col9":"ctimestamp2"},"outputColumnNames:":["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]"},"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_12","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col1":"0:_col1","_col10":"0:_col10","_col11":"0:_col11","_col12":"1:_col0","_col13":"1:_col1","_col14":"1:_col2","_col15":"1:_col3","_col16":"1:_col4","_col17":"1:_col5","_col18":"1:_col6","_col19":"1:_col7","_col2":"0:_col2","_col20":"1:_col8","_col21":"1:_col9","_col22":"1:_col10","_col23":"1:_col11","_col3":"0:_col3","_col4":"0:_col4","_col5":"0:_col5","_col6":"0:_col6","_col7":"0:_col7","_col8":"0:_col8","_col9":"0:_col9"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col2 (type: int)","1":"_col2 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 2:int"],"bigTableValueExpressions:":["col 0:tinyint","col 1:smallint","col 2:int","col 3:bigint","col 4:float","col 5:double","col 6:string","col 7:string","col 8:timestamp","col 9:timestamp","col 10:boolean","col 11:boolean"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col20","_col21","_col22","_col23"],"Statistics:":"Num rows: 33 Data size: 7348 Basic stats: COMPLETE Column stats: 
NONE","OperatorId:":"MAPJOIN_13","children":{"File Output Operator":{"compressed:":"false","File Sink Vectorization:":{"className:":"VectorFileSinkOperator","native:":"false"},"Statistics:":"Num rows: 33 Data size: 7348 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_14"}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[DECIMAL_64]","featureSupportInUse:":"[DECIMAL_64]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[bigint, bigint, bigint, bigint, double, double, string, string, timestamp, timestamp, bigint, bigint]"}},"Local Work:":{"Map Reduce Local Work":{}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_15"}}}}}} PREHOOK: query: select * from small_alltypesorc_b c left outer join small_alltypesorc_b cd @@ -339,7 +339,7 @@ from small_alltypesorc_b c left outer join small_alltypesorc_b hd on hd.ctinyint = c.ctinyint POSTHOOK: type: QUERY -{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-4":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-4"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-4":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:hd":{"TableScan":{"alias:":"hd","columns:":["ctinyint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint)","columnExprMap:":{"_col0":"ctinyint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: tinyint)","1":"_col0 (type: tinyint)"},"OperatorId:":"HASHTABLESINK_10"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["ctinyint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"ctinyint (type: 
tinyint)","columnExprMap:":{"_col0":"ctinyint"},"outputColumnNames:":["_col0"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0]"},"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_12","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: tinyint)","1":"_col0 (type: tinyint)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:tinyint"],"bigTableValueExpressions:":["col 0:tinyint"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 33 Data size: 7348 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_13","children":{"File Output Operator":{"compressed:":"false","File Sink Vectorization:":{"className:":"VectorFileSinkOperator","native:":"false"},"Statistics:":"Num rows: 33 Data size: 7348 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_14"}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[0]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_15"}}}}}} +{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-4":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-4"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-4":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:hd":{"TableScan":{"alias:":"hd","columns:":["ctinyint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint)","columnExprMap:":{"_col0":"ctinyint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: tinyint)","1":"_col0 (type: 
tinyint)"},"OperatorId:":"HASHTABLESINK_10"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["ctinyint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint)","columnExprMap:":{"_col0":"ctinyint"},"outputColumnNames:":["_col0"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0]"},"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_12","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: tinyint)","1":"_col0 (type: tinyint)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:tinyint"],"bigTableValueExpressions:":["col 0:tinyint"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 33 Data size: 7348 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_13","children":{"File Output Operator":{"compressed:":"false","File Sink Vectorization:":{"className:":"VectorFileSinkOperator","native:":"false"},"Statistics:":"Num rows: 33 Data size: 7348 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_14"}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[DECIMAL_64]","featureSupportInUse:":"[DECIMAL_64]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[0]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_15"}}}}}} PREHOOK: query: select c.ctinyint from small_alltypesorc_b c left outer join small_alltypesorc_b hd @@ -782,7 +782,7 @@ left outer join small_alltypesorc_b hd on hd.ctinyint = c.ctinyint ) t1 POSTHOOK: type: QUERY -{"PLAN 
VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cint (type: int)","columnExprMap:":{"_col0":"cint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: int)","1":"_col0 (type: int)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["ctinyint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint)","columnExprMap:":{"_col0":"ctinyint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: tinyint)","1":"_col0 (type: tinyint)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["ctinyint","cint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint), cint (type: int)","columnExprMap:":{"_col0":"ctinyint","_col1":"cint"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 2]"},"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: int)","1":"_col0 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 2:int"],"bigTableValueExpressions:":["col 0:tinyint"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 33 Data size: 7348 Basic 
stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: tinyint)","1":"_col0 (type: tinyint)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:tinyint"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 36 Data size: 8082 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[0, 2]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch 
Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} +{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cint (type: int)","columnExprMap:":{"_col0":"cint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: int)","1":"_col0 (type: int)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["ctinyint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint)","columnExprMap:":{"_col0":"ctinyint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: tinyint)","1":"_col0 (type: tinyint)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["ctinyint","cint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint), cint (type: int)","columnExprMap:":{"_col0":"ctinyint","_col1":"cint"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 2]"},"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: int)","1":"_col0 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 2:int"],"bigTableValueExpressions:":["col 0:tinyint"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] 
IS false"]},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 33 Data size: 7348 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: tinyint)","1":"_col0 (type: tinyint)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:tinyint"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 36 Data size: 8082 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[DECIMAL_64]","featureSupportInUse:":"[DECIMAL_64]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[0, 2]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output 
format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} PREHOOK: query: select count(*) from (select c.ctinyint from small_alltypesorc_b c left outer join small_alltypesorc_b cd diff --git a/ql/src/test/results/clientpositive/vector_outer_join6.q.out b/ql/src/test/results/clientpositive/vector_outer_join6.q.out index e2d6cc8..7151965 100644 --- a/ql/src/test/results/clientpositive/vector_outer_join6.q.out +++ b/ql/src/test/results/clientpositive/vector_outer_join6.q.out @@ -130,7 +130,7 @@ POSTHOOK: query: explain vectorization detail formatted select tj1rnum, tj2rnum, tjoin3.rnum as rnumt3 from (select tjoin1_n0.rnum tj1rnum, tjoin2_n0.rnum tj2rnum, tjoin2_n0.c1 tj2c1 from tjoin1_n0 left outer join tjoin2_n0 on tjoin1_n0.c1 = tjoin2_n0.c1 ) tj left outer join tjoin3 on tj2c1 = tjoin3.c1 POSTHOOK: type: QUERY -{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-7":{"ROOT STAGE":"TRUE"},"Stage-5":{"DEPENDENT STAGES":"Stage-7"},"Stage-0":{"DEPENDENT STAGES":"Stage-5"}},"STAGE PLANS":{"Stage-7":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_0:$hdt$_0:tjoin2_n0":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_1:tjoin3":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_0:$hdt$_0:tjoin2_n0":{"TableScan":{"alias:":"tjoin2_n0","columns:":["rnum","c1"],"database:":"default","Statistics:":"Num rows: 4 Data size: 372 Basic stats: COMPLETE Column stats: NONE","table:":"tjoin2_n0","isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"rnum (type: int), c1 (type: int)","columnExprMap:":{"_col0":"rnum","_col1":"c1"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 4 Data size: 372 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_1","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: int)","1":"_col1 (type: int)"},"OperatorId:":"HASHTABLESINK_21"}}}}}},"$hdt$_1:tjoin3":{"TableScan":{"alias:":"tjoin3","columns:":["rnum","c1"],"database:":"default","Statistics:":"Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE","table:":"tjoin3","isTempTable:":"false","OperatorId:":"TS_8","children":{"Select Operator":{"expressions:":"rnum (type: int), c1 (type: int)","columnExprMap:":{"_col0":"rnum","_col1":"c1"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_9","children":{"HashTable Sink Operator":{"keys:":{"0":"_col2 (type: int)","1":"_col1 (type: int)"},"OperatorId:":"HASHTABLESINK_19"}}}}}}}}},"Stage-5":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"tjoin1_n0","columns:":["rnum","c1"],"database:":"default","Statistics:":"Num rows: 3 Data size: 32 Basic stats: COMPLETE Column stats: NONE","table:":"tjoin1_n0","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:rnum:int, 1:c1:int, 2:c2:int, 3:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"rnum (type: int), c1 (type: int)","columnExprMap:":{"_col0":"rnum","_col1":"c1"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 1]"},"Statistics:":"Num rows: 3 
Data size: 32 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_23","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col1":"0:_col1","_col2":"1:_col0"},"condition map:":[{"":"Right Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: int)","1":"_col1 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 1:int"],"bigTableValueExpressions:":["col 0:int"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col1","_col2"],"Statistics:":"Num rows: 4 Data size: 409 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_24","children":{"Select Operator":{"expressions:":"_col2 (type: int), _col0 (type: int), _col1 (type: int)","columnExprMap:":{"_col0":"_col2","_col1":"_col0","_col2":"_col1"},"outputColumnNames:":["_col0","_col1","_col2"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[2, 0, 1]"},"Statistics:":"Num rows: 4 Data size: 409 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_25","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col1":"0:_col1","_col3":"1:_col0"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col2 (type: int)","1":"_col1 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 1:int"],"bigTableValueExpressions:":["col 2:int","col 0:int"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col1","_col3"],"Statistics:":"Num rows: 4 Data size: 449 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_26","children":{"Select Operator":{"expressions:":"_col0 (type: int), _col1 (type: int), _col3 (type: int)","columnExprMap:":{"_col0":"_col0","_col1":"_col1","_col2":"_col3"},"outputColumnNames:":["_col0","_col1","_col2"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 1, 2]"},"Statistics:":"Num rows: 4 Data size: 449 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_27","children":{"File Output Operator":{"compressed:":"false","File Sink Vectorization:":{"className:":"VectorFileSinkOperator","native:":"false"},"Statistics:":"Num rows: 4 Data size: 449 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_28"}}}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS 
true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"3","includeColumns:":"[0, 1]","dataColumns:":["rnum:int","c1:int","c2:int"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[bigint, bigint]"}},"Local Work:":{"Map Reduce Local Work":{}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_29"}}}}}} +{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-7":{"ROOT STAGE":"TRUE"},"Stage-5":{"DEPENDENT STAGES":"Stage-7"},"Stage-0":{"DEPENDENT STAGES":"Stage-5"}},"STAGE PLANS":{"Stage-7":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_0:$hdt$_0:tjoin2_n0":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_1:tjoin3":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_0:$hdt$_0:tjoin2_n0":{"TableScan":{"alias:":"tjoin2_n0","columns:":["rnum","c1"],"database:":"default","Statistics:":"Num rows: 4 Data size: 372 Basic stats: COMPLETE Column stats: NONE","table:":"tjoin2_n0","isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"rnum (type: int), c1 (type: int)","columnExprMap:":{"_col0":"rnum","_col1":"c1"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 4 Data size: 372 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_1","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: int)","1":"_col1 (type: int)"},"OperatorId:":"HASHTABLESINK_21"}}}}}},"$hdt$_1:tjoin3":{"TableScan":{"alias:":"tjoin3","columns:":["rnum","c1"],"database:":"default","Statistics:":"Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE","table:":"tjoin3","isTempTable:":"false","OperatorId:":"TS_8","children":{"Select Operator":{"expressions:":"rnum (type: int), c1 (type: int)","columnExprMap:":{"_col0":"rnum","_col1":"c1"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_9","children":{"HashTable Sink Operator":{"keys:":{"0":"_col2 (type: int)","1":"_col1 (type: int)"},"OperatorId:":"HASHTABLESINK_19"}}}}}}}}},"Stage-5":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"tjoin1_n0","columns:":["rnum","c1"],"database:":"default","Statistics:":"Num rows: 3 Data size: 32 Basic stats: COMPLETE Column stats: NONE","table:":"tjoin1_n0","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:rnum:int, 1:c1:int, 2:c2:int, 3:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"rnum (type: int), c1 (type: int)","columnExprMap:":{"_col0":"rnum","_col1":"c1"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 1]"},"Statistics:":"Num rows: 3 Data size: 32 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_23","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col1":"0:_col1","_col2":"1:_col0"},"condition map:":[{"":"Right Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: int)","1":"_col1 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 1:int"],"bigTableValueExpressions:":["col 
0:int"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col1","_col2"],"Statistics:":"Num rows: 4 Data size: 409 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_24","children":{"Select Operator":{"expressions:":"_col2 (type: int), _col0 (type: int), _col1 (type: int)","columnExprMap:":{"_col0":"_col2","_col1":"_col0","_col2":"_col1"},"outputColumnNames:":["_col0","_col1","_col2"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[2, 0, 1]"},"Statistics:":"Num rows: 4 Data size: 409 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_25","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col1":"0:_col1","_col3":"1:_col0"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col2 (type: int)","1":"_col1 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 1:int"],"bigTableValueExpressions:":["col 2:int","col 0:int"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col1","_col3"],"Statistics:":"Num rows: 4 Data size: 449 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_26","children":{"Select Operator":{"expressions:":"_col0 (type: int), _col1 (type: int), _col3 (type: int)","columnExprMap:":{"_col0":"_col0","_col1":"_col1","_col2":"_col3"},"outputColumnNames:":["_col0","_col1","_col2"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 1, 2]"},"Statistics:":"Num rows: 4 Data size: 449 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_27","children":{"File Output Operator":{"compressed:":"false","File Sink Vectorization:":{"className:":"VectorFileSinkOperator","native:":"false"},"Statistics:":"Num rows: 4 Data size: 449 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_28"}}}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[DECIMAL_64]","featureSupportInUse:":"[DECIMAL_64]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"3","includeColumns:":"[0, 1]","dataColumns:":["rnum:int","c1:int","c2:int"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[bigint, bigint]"}},"Local Work:":{"Map Reduce Local Work":{}}}},"Stage-0":{"Fetch 
Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_29"}}}}}} PREHOOK: query: select tj1rnum, tj2rnum, tjoin3.rnum as rnumt3 from (select tjoin1_n0.rnum tj1rnum, tjoin2_n0.rnum tj2rnum, tjoin2_n0.c1 tj2c1 from tjoin1_n0 left outer join tjoin2_n0 on tjoin1_n0.c1 = tjoin2_n0.c1 ) tj left outer join tjoin3 on tj2c1 = tjoin3.c1 PREHOOK: type: QUERY @@ -157,7 +157,7 @@ POSTHOOK: query: explain vectorization detail formatted select tj1rnum, tj2rnum as rnumt3 from (select tjoin1_n0.rnum tj1rnum, tjoin2_n0.rnum tj2rnum, tjoin2_n0.c1 tj2c1 from tjoin1_n0 left outer join tjoin2_n0 on tjoin1_n0.c1 = tjoin2_n0.c1 ) tj left outer join tjoin3 on tj2c1 = tjoin3.c1 POSTHOOK: type: QUERY -{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-7":{"ROOT STAGE":"TRUE"},"Stage-5":{"DEPENDENT STAGES":"Stage-7"},"Stage-0":{"DEPENDENT STAGES":"Stage-5"}},"STAGE PLANS":{"Stage-7":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_0:$hdt$_0:tjoin2_n0":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_1:tjoin3":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_0:$hdt$_0:tjoin2_n0":{"TableScan":{"alias:":"tjoin2_n0","columns:":["rnum","c1"],"database:":"default","Statistics:":"Num rows: 4 Data size: 372 Basic stats: COMPLETE Column stats: NONE","table:":"tjoin2_n0","isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"rnum (type: int), c1 (type: int)","columnExprMap:":{"_col0":"rnum","_col1":"c1"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 4 Data size: 372 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_1","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: int)","1":"_col1 (type: int)"},"OperatorId:":"HASHTABLESINK_21"}}}}}},"$hdt$_1:tjoin3":{"TableScan":{"alias:":"tjoin3","columns:":["c1"],"database:":"default","Statistics:":"Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE","table:":"tjoin3","isTempTable:":"false","OperatorId:":"TS_8","children":{"Select Operator":{"expressions:":"c1 (type: int)","columnExprMap:":{"_col0":"c1"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_9","children":{"HashTable Sink Operator":{"keys:":{"0":"_col2 (type: int)","1":"_col0 (type: int)"},"OperatorId:":"HASHTABLESINK_19"}}}}}}}}},"Stage-5":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"tjoin1_n0","columns:":["rnum","c1"],"database:":"default","Statistics:":"Num rows: 3 Data size: 32 Basic stats: COMPLETE Column stats: NONE","table:":"tjoin1_n0","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:rnum:int, 1:c1:int, 2:c2:int, 3:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"rnum (type: int), c1 (type: int)","columnExprMap:":{"_col0":"rnum","_col1":"c1"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 1]"},"Statistics:":"Num rows: 3 Data size: 32 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_23","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col1":"0:_col1","_col2":"1:_col0"},"condition map:":[{"":"Right Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: int)","1":"_col1 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 
1:int"],"bigTableValueExpressions:":["col 0:int"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col1","_col2"],"Statistics:":"Num rows: 4 Data size: 409 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_24","children":{"Select Operator":{"expressions:":"_col2 (type: int), _col0 (type: int), _col1 (type: int)","columnExprMap:":{"_col0":"_col2","_col1":"_col0","_col2":"_col1"},"outputColumnNames:":["_col0","_col1","_col2"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[2, 0, 1]"},"Statistics:":"Num rows: 4 Data size: 409 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_25","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col1":"0:_col1"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col2 (type: int)","1":"_col0 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 1:int"],"bigTableValueExpressions:":["col 2:int","col 0:int"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 4 Data size: 449 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_26","children":{"File Output Operator":{"compressed:":"false","File Sink Vectorization:":{"className:":"VectorFileSinkOperator","native:":"false"},"Statistics:":"Num rows: 4 Data size: 449 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_27"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"3","includeColumns:":"[0, 1]","dataColumns:":["rnum:int","c1:int","c2:int"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[bigint, bigint]"}},"Local Work:":{"Map Reduce Local Work":{}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_28"}}}}}} +{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-7":{"ROOT STAGE":"TRUE"},"Stage-5":{"DEPENDENT STAGES":"Stage-7"},"Stage-0":{"DEPENDENT STAGES":"Stage-5"}},"STAGE PLANS":{"Stage-7":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_0:$hdt$_0:tjoin2_n0":{"Fetch 
Operator":{"limit:":"-1"}},"$hdt$_1:tjoin3":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_0:$hdt$_0:tjoin2_n0":{"TableScan":{"alias:":"tjoin2_n0","columns:":["rnum","c1"],"database:":"default","Statistics:":"Num rows: 4 Data size: 372 Basic stats: COMPLETE Column stats: NONE","table:":"tjoin2_n0","isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"rnum (type: int), c1 (type: int)","columnExprMap:":{"_col0":"rnum","_col1":"c1"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 4 Data size: 372 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_1","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: int)","1":"_col1 (type: int)"},"OperatorId:":"HASHTABLESINK_21"}}}}}},"$hdt$_1:tjoin3":{"TableScan":{"alias:":"tjoin3","columns:":["c1"],"database:":"default","Statistics:":"Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE","table:":"tjoin3","isTempTable:":"false","OperatorId:":"TS_8","children":{"Select Operator":{"expressions:":"c1 (type: int)","columnExprMap:":{"_col0":"c1"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_9","children":{"HashTable Sink Operator":{"keys:":{"0":"_col2 (type: int)","1":"_col0 (type: int)"},"OperatorId:":"HASHTABLESINK_19"}}}}}}}}},"Stage-5":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"tjoin1_n0","columns:":["rnum","c1"],"database:":"default","Statistics:":"Num rows: 3 Data size: 32 Basic stats: COMPLETE Column stats: NONE","table:":"tjoin1_n0","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:rnum:int, 1:c1:int, 2:c2:int, 3:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"rnum (type: int), c1 (type: int)","columnExprMap:":{"_col0":"rnum","_col1":"c1"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 1]"},"Statistics:":"Num rows: 3 Data size: 32 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_23","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col1":"0:_col1","_col2":"1:_col0"},"condition map:":[{"":"Right Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: int)","1":"_col1 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 1:int"],"bigTableValueExpressions:":["col 0:int"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col1","_col2"],"Statistics:":"Num rows: 4 Data size: 409 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_24","children":{"Select Operator":{"expressions:":"_col2 (type: int), _col0 (type: int), _col1 (type: int)","columnExprMap:":{"_col0":"_col2","_col1":"_col0","_col2":"_col1"},"outputColumnNames:":["_col0","_col1","_col2"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[2, 0, 1]"},"Statistics:":"Num rows: 4 Data size: 409 Basic stats: COMPLETE Column stats: 
NONE","OperatorId:":"SEL_25","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col1":"0:_col1"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col2 (type: int)","1":"_col0 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 1:int"],"bigTableValueExpressions:":["col 2:int","col 0:int"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 4 Data size: 449 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_26","children":{"File Output Operator":{"compressed:":"false","File Sink Vectorization:":{"className:":"VectorFileSinkOperator","native:":"false"},"Statistics:":"Num rows: 4 Data size: 449 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_27"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[DECIMAL_64]","featureSupportInUse:":"[DECIMAL_64]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"3","includeColumns:":"[0, 1]","dataColumns:":["rnum:int","c1:int","c2:int"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[bigint, bigint]"}},"Local Work:":{"Map Reduce Local Work":{}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_28"}}}}}} PREHOOK: query: select tj1rnum, tj2rnum as rnumt3 from (select tjoin1_n0.rnum tj1rnum, tjoin2_n0.rnum tj2rnum, tjoin2_n0.c1 tj2c1 from tjoin1_n0 left outer join tjoin2_n0 on tjoin1_n0.c1 = tjoin2_n0.c1 ) tj left outer join tjoin3 on tj2c1 = tjoin3.c1 PREHOOK: type: QUERY diff --git a/ql/src/test/results/clientpositive/vector_outer_join_no_keys.q.out b/ql/src/test/results/clientpositive/vector_outer_join_no_keys.q.out index 7454c4b..750ef5c 100644 --- a/ql/src/test/results/clientpositive/vector_outer_join_no_keys.q.out +++ b/ql/src/test/results/clientpositive/vector_outer_join_no_keys.q.out @@ -116,8 +116,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -261,8 +261,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git 
a/ql/src/test/results/clientpositive/vector_reduce1.q.out b/ql/src/test/results/clientpositive/vector_reduce1.q.out index 8be8dba..99bdfe6 100644 --- a/ql/src/test/results/clientpositive/vector_reduce1.q.out +++ b/ql/src/test/results/clientpositive/vector_reduce1.q.out @@ -145,8 +145,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_reduce2.q.out b/ql/src/test/results/clientpositive/vector_reduce2.q.out index 7799746..4bc7bf5 100644 --- a/ql/src/test/results/clientpositive/vector_reduce2.q.out +++ b/ql/src/test/results/clientpositive/vector_reduce2.q.out @@ -145,8 +145,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_reduce3.q.out b/ql/src/test/results/clientpositive/vector_reduce3.q.out index a7ad970..22923ee 100644 --- a/ql/src/test/results/clientpositive/vector_reduce3.q.out +++ b/ql/src/test/results/clientpositive/vector_reduce3.q.out @@ -145,8 +145,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_reduce_groupby_decimal.q.out b/ql/src/test/results/clientpositive/vector_reduce_groupby_decimal.q.out index 15f3b09..a4048bd 100644 --- a/ql/src/test/results/clientpositive/vector_reduce_groupby_decimal.q.out +++ b/ql/src/test/results/clientpositive/vector_reduce_groupby_decimal.q.out @@ -83,8 +83,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_string_concat.q.out b/ql/src/test/results/clientpositive/vector_string_concat.q.out index a61c6f0..68b011d 100644 --- a/ql/src/test/results/clientpositive/vector_string_concat.q.out +++ b/ql/src/test/results/clientpositive/vector_string_concat.q.out @@ -151,8 +151,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -369,8 +369,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - 
inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_struct_in.q.out b/ql/src/test/results/clientpositive/vector_struct_in.q.out index 66dd49a..265825b 100644 --- a/ql/src/test/results/clientpositive/vector_struct_in.q.out +++ b/ql/src/test/results/clientpositive/vector_struct_in.q.out @@ -90,8 +90,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -203,8 +203,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -340,8 +340,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -453,8 +453,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -590,8 +590,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -703,8 +703,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -843,8 +843,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -959,8 +959,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_tablesample_rows.q.out b/ql/src/test/results/clientpositive/vector_tablesample_rows.q.out index ae13ae6..ef20c86 100644 --- a/ql/src/test/results/clientpositive/vector_tablesample_rows.q.out +++ b/ql/src/test/results/clientpositive/vector_tablesample_rows.q.out @@ -47,8 +47,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -142,8 +142,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_udf3.q.out b/ql/src/test/results/clientpositive/vector_udf3.q.out index e230c0e..27dde3e 100644 --- a/ql/src/test/results/clientpositive/vector_udf3.q.out +++ b/ql/src/test/results/clientpositive/vector_udf3.q.out @@ -48,8 +48,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_varchar_4.q.out b/ql/src/test/results/clientpositive/vector_varchar_4.q.out index 24016b2..0ffb777 100644 --- a/ql/src/test/results/clientpositive/vector_varchar_4.q.out +++ b/ql/src/test/results/clientpositive/vector_varchar_4.q.out @@ -174,8 +174,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_varchar_mapjoin1.q.out b/ql/src/test/results/clientpositive/vector_varchar_mapjoin1.q.out index 282aec4..29bc9df 100644 --- a/ql/src/test/results/clientpositive/vector_varchar_mapjoin1.q.out +++ b/ql/src/test/results/clientpositive/vector_varchar_mapjoin1.q.out @@ -191,8 +191,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -300,8 +300,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -411,8 +411,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_varchar_simple.q.out b/ql/src/test/results/clientpositive/vector_varchar_simple.q.out index f3aec13..154e752 100644 --- a/ql/src/test/results/clientpositive/vector_varchar_simple.q.out +++ b/ql/src/test/results/clientpositive/vector_varchar_simple.q.out @@ -84,8 +84,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -193,8 +193,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -316,8 +316,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vector_when_case_null.q.out b/ql/src/test/results/clientpositive/vector_when_case_null.q.out index 3ce7b41..2cdbe38 100644 --- a/ql/src/test/results/clientpositive/vector_when_case_null.q.out +++ b/ql/src/test/results/clientpositive/vector_when_case_null.q.out @@ -77,8 +77,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorization_1.q.out b/ql/src/test/results/clientpositive/vectorization_1.q.out index 767db3c..6801978 100644 --- a/ql/src/test/results/clientpositive/vectorization_1.q.out +++ b/ql/src/test/results/clientpositive/vectorization_1.q.out @@ -99,8 +99,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorization_10.q.out b/ql/src/test/results/clientpositive/vectorization_10.q.out index 57a6fe9..69aa608 100644 --- a/ql/src/test/results/clientpositive/vectorization_10.q.out +++ 
b/ql/src/test/results/clientpositive/vectorization_10.q.out @@ -94,8 +94,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorization_11.q.out b/ql/src/test/results/clientpositive/vectorization_11.q.out index 5792d0d..06cde65 100644 --- a/ql/src/test/results/clientpositive/vectorization_11.q.out +++ b/ql/src/test/results/clientpositive/vectorization_11.q.out @@ -76,8 +76,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorization_12.q.out b/ql/src/test/results/clientpositive/vectorization_12.q.out index 39c3476..527d741 100644 --- a/ql/src/test/results/clientpositive/vectorization_12.q.out +++ b/ql/src/test/results/clientpositive/vectorization_12.q.out @@ -126,8 +126,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorization_13.q.out b/ql/src/test/results/clientpositive/vectorization_13.q.out index 1cf64a3..cae441e 100644 --- a/ql/src/test/results/clientpositive/vectorization_13.q.out +++ b/ql/src/test/results/clientpositive/vectorization_13.q.out @@ -128,8 +128,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -458,8 +458,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorization_14.q.out b/ql/src/test/results/clientpositive/vectorization_14.q.out index 9dbc20c..86be72e 100644 --- a/ql/src/test/results/clientpositive/vectorization_14.q.out +++ b/ql/src/test/results/clientpositive/vectorization_14.q.out @@ -128,8 +128,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git 
a/ql/src/test/results/clientpositive/vectorization_15.q.out b/ql/src/test/results/clientpositive/vectorization_15.q.out index 589b8f5..db6dea1 100644 --- a/ql/src/test/results/clientpositive/vectorization_15.q.out +++ b/ql/src/test/results/clientpositive/vectorization_15.q.out @@ -124,8 +124,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorization_16.q.out b/ql/src/test/results/clientpositive/vectorization_16.q.out index 18120f2..0d58e55 100644 --- a/ql/src/test/results/clientpositive/vectorization_16.q.out +++ b/ql/src/test/results/clientpositive/vectorization_16.q.out @@ -101,8 +101,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorization_17.q.out b/ql/src/test/results/clientpositive/vectorization_17.q.out index 6c9212f..b782e3a 100644 --- a/ql/src/test/results/clientpositive/vectorization_17.q.out +++ b/ql/src/test/results/clientpositive/vectorization_17.q.out @@ -94,8 +94,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorization_2.q.out b/ql/src/test/results/clientpositive/vectorization_2.q.out index 05f22bb..be7c843 100644 --- a/ql/src/test/results/clientpositive/vectorization_2.q.out +++ b/ql/src/test/results/clientpositive/vectorization_2.q.out @@ -103,8 +103,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorization_3.q.out b/ql/src/test/results/clientpositive/vectorization_3.q.out index b5ee0fb..7bfb313 100644 --- a/ql/src/test/results/clientpositive/vectorization_3.q.out +++ b/ql/src/test/results/clientpositive/vectorization_3.q.out @@ -108,8 +108,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorization_4.q.out b/ql/src/test/results/clientpositive/vectorization_4.q.out index 6dd398d..0700753 100644 --- 
a/ql/src/test/results/clientpositive/vectorization_4.q.out +++ b/ql/src/test/results/clientpositive/vectorization_4.q.out @@ -103,8 +103,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorization_5.q.out b/ql/src/test/results/clientpositive/vectorization_5.q.out index dc539e0..6c82694 100644 --- a/ql/src/test/results/clientpositive/vectorization_5.q.out +++ b/ql/src/test/results/clientpositive/vectorization_5.q.out @@ -96,8 +96,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorization_6.q.out b/ql/src/test/results/clientpositive/vectorization_6.q.out index 6d17130..1c6e0c5 100644 --- a/ql/src/test/results/clientpositive/vectorization_6.q.out +++ b/ql/src/test/results/clientpositive/vectorization_6.q.out @@ -88,8 +88,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorization_7.q.out b/ql/src/test/results/clientpositive/vectorization_7.q.out index 3999bf2..a1eb6f1 100644 --- a/ql/src/test/results/clientpositive/vectorization_7.q.out +++ b/ql/src/test/results/clientpositive/vectorization_7.q.out @@ -100,8 +100,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -320,8 +320,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorization_8.q.out b/ql/src/test/results/clientpositive/vectorization_8.q.out index f658f80..3172f65 100644 --- a/ql/src/test/results/clientpositive/vectorization_8.q.out +++ b/ql/src/test/results/clientpositive/vectorization_8.q.out @@ -96,8 +96,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false 
usesVectorUDFAdaptor: false @@ -303,8 +303,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorization_9.q.out b/ql/src/test/results/clientpositive/vectorization_9.q.out index 18120f2..0d58e55 100644 --- a/ql/src/test/results/clientpositive/vectorization_9.q.out +++ b/ql/src/test/results/clientpositive/vectorization_9.q.out @@ -101,8 +101,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorization_decimal_date.q.out b/ql/src/test/results/clientpositive/vectorization_decimal_date.q.out index 44e4632..b1f4bdc 100644 --- a/ql/src/test/results/clientpositive/vectorization_decimal_date.q.out +++ b/ql/src/test/results/clientpositive/vectorization_decimal_date.q.out @@ -68,8 +68,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorization_limit.q.out b/ql/src/test/results/clientpositive/vectorization_limit.q.out index 438c060..cd256c6 100644 --- a/ql/src/test/results/clientpositive/vectorization_limit.q.out +++ b/ql/src/test/results/clientpositive/vectorization_limit.q.out @@ -36,8 +36,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -137,8 +137,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -266,8 +266,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -446,8 +446,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -750,8 +750,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorization_nested_udf.q.out b/ql/src/test/results/clientpositive/vectorization_nested_udf.q.out index c0677bd..b4fe31a 100644 --- a/ql/src/test/results/clientpositive/vectorization_nested_udf.q.out +++ b/ql/src/test/results/clientpositive/vectorization_nested_udf.q.out @@ -56,8 +56,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorization_offset_limit.q.out b/ql/src/test/results/clientpositive/vectorization_offset_limit.q.out index 18d236c..5920b3d 100644 --- a/ql/src/test/results/clientpositive/vectorization_offset_limit.q.out +++ b/ql/src/test/results/clientpositive/vectorization_offset_limit.q.out @@ -40,8 +40,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -117,8 +117,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorization_part_project.q.out b/ql/src/test/results/clientpositive/vectorization_part_project.q.out index e34bb24..50052fd 100644 --- a/ql/src/test/results/clientpositive/vectorization_part_project.q.out +++ b/ql/src/test/results/clientpositive/vectorization_part_project.q.out @@ -78,8 +78,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorization_pushdown.q.out b/ql/src/test/results/clientpositive/vectorization_pushdown.q.out index 6216581..e962362 100644 --- a/ql/src/test/results/clientpositive/vectorization_pushdown.q.out +++ b/ql/src/test/results/clientpositive/vectorization_pushdown.q.out @@ -39,8 +39,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + 
featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorized_case.q.out b/ql/src/test/results/clientpositive/vectorized_case.q.out index 9c7339e..53da2a6 100644 --- a/ql/src/test/results/clientpositive/vectorized_case.q.out +++ b/ql/src/test/results/clientpositive/vectorized_case.q.out @@ -82,8 +82,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -227,8 +227,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -310,8 +310,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -425,8 +425,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -517,7 +517,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 672 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:member:decimal(10,0), 1:attr:decimal(10,0), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:member:decimal(10,0)/DECIMAL_64, 1:attr:decimal(10,0)/DECIMAL_64, 2:ROW__ID:struct] Select Operator expressions: CASE WHEN ((member = 1)) THEN ((attr + 1)) ELSE ((attr + 2)) END (type: decimal(11,0)) outputColumnNames: _col0 @@ -525,7 +525,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [6] - selectExpressions: IfExprCondExprCondExpr(col 3:boolean, col 4:decimal(11,0)col 5:decimal(11,0))(children: VectorUDFAdaptor((member = 1)) -> 3:boolean, DecimalColAddDecimalScalar(col 1:decimal(10,0), val 1) -> 4:decimal(11,0), DecimalColAddDecimalScalar(col 1:decimal(10,0), val 2) -> 5:decimal(11,0)) -> 6:decimal(11,0) + selectExpressions: IfExprCondExprCondExpr(col 3:boolean, col 7:decimal(11,0)col 8:decimal(11,0))(children: VectorUDFAdaptor((member = 1)) -> 3:boolean, ConvertDecimal64ToDecimal(col 4:decimal(11,0)/DECIMAL_64)(children: Decimal64ColAddDecimal64Scalar(col 1:decimal(10,0)/DECIMAL_64, decimal64Val 1, decimalVal 1) -> 4:decimal(11,0)/DECIMAL_64) -> 7:decimal(11,0), ConvertDecimal64ToDecimal(col 5:decimal(11,0)/DECIMAL_64)(children: Decimal64ColAddDecimal64Scalar(col 1:decimal(10,0)/DECIMAL_64, decimal64Val 2, decimalVal 2) -> 5:decimal(11,0)/DECIMAL_64) -> 8:decimal(11,0)) -> 6:decimal(11,0) Statistics: Num rows: 3 Data size: 672 Basic 
stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -541,8 +541,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -550,9 +550,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: member:decimal(10,0), attr:decimal(10,0) + dataColumns: member:decimal(10,0)/DECIMAL_64, attr:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [bigint, decimal(11,0), decimal(11,0), decimal(11,0)] + scratchColumnTypeNames: [bigint, decimal(11,0)/DECIMAL_64, decimal(11,0)/DECIMAL_64, decimal(11,0), decimal(11,0), decimal(11,0)] Stage: Stage-0 Fetch Operator @@ -594,15 +594,15 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 672 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:member:decimal(10,0), 1:attr:decimal(10,0), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:member:decimal(10,0)/DECIMAL_64, 1:attr:decimal(10,0)/DECIMAL_64, 2:ROW__ID:struct] Select Operator expressions: CASE WHEN ((member = 1)) THEN (1) ELSE ((attr + 2)) END (type: decimal(11,0)) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [6] - selectExpressions: IfExprColumnCondExpr(col 3:boolean, col 4:decimal(1,0)col 5:decimal(11,0))(children: VectorUDFAdaptor((member = 1)) -> 3:boolean, ConstantVectorExpression(val 1) -> 4:decimal(1,0), DecimalColAddDecimalScalar(col 1:decimal(10,0), val 2) -> 5:decimal(11,0)) -> 6:decimal(11,0) + projectedOutputColumnNums: [8] + selectExpressions: VectorUDFAdaptor(CASE WHEN ((member = 1)) THEN (1) ELSE ((attr + 2)) END)(children: VectorUDFAdaptor((member = 1)) -> 6:boolean, Decimal64ColAddDecimal64Scalar(col 1:decimal(10,0)/DECIMAL_64, decimal64Val 2, decimalVal 2) -> 7:decimal(11,0)/DECIMAL_64) -> 8:decimal(11,0) Statistics: Num rows: 3 Data size: 672 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -618,8 +618,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -627,9 +627,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: member:decimal(10,0), attr:decimal(10,0) + dataColumns: member:decimal(10,0)/DECIMAL_64, attr:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [bigint, decimal(1,0), decimal(11,0), decimal(11,0)] + scratchColumnTypeNames: [bigint, decimal(1,0), decimal(11,0)/DECIMAL_64, bigint, decimal(11,0)/DECIMAL_64, decimal(11,0)] Stage: Stage-0 Fetch Operator @@ -671,15 +671,15 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 672 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:member:decimal(10,0), 1:attr:decimal(10,0), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:member:decimal(10,0)/DECIMAL_64, 1:attr:decimal(10,0)/DECIMAL_64, 2:ROW__ID:struct] Select Operator expressions: 
CASE WHEN ((member = 1)) THEN ((attr + 1)) ELSE (2) END (type: decimal(11,0)) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [6] - selectExpressions: IfExprCondExprColumn(col 3:boolean, col 4:decimal(11,0), col 5:decimal(1,0))(children: VectorUDFAdaptor((member = 1)) -> 3:boolean, DecimalColAddDecimalScalar(col 1:decimal(10,0), val 1) -> 4:decimal(11,0), ConstantVectorExpression(val 2) -> 5:decimal(1,0)) -> 6:decimal(11,0) + projectedOutputColumnNums: [8] + selectExpressions: VectorUDFAdaptor(CASE WHEN ((member = 1)) THEN ((attr + 1)) ELSE (2) END)(children: VectorUDFAdaptor((member = 1)) -> 6:boolean, Decimal64ColAddDecimal64Scalar(col 1:decimal(10,0)/DECIMAL_64, decimal64Val 1, decimalVal 1) -> 7:decimal(11,0)/DECIMAL_64) -> 8:decimal(11,0) Statistics: Num rows: 3 Data size: 672 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -695,8 +695,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -704,9 +704,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: member:decimal(10,0), attr:decimal(10,0) + dataColumns: member:decimal(10,0)/DECIMAL_64, attr:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [bigint, decimal(11,0), decimal(1,0), decimal(11,0)] + scratchColumnTypeNames: [bigint, decimal(11,0)/DECIMAL_64, decimal(1,0), bigint, decimal(11,0)/DECIMAL_64, decimal(11,0)] Stage: Stage-0 Fetch Operator @@ -790,8 +790,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -867,8 +867,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -944,8 +944,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorized_casts.q.out b/ql/src/test/results/clientpositive/vectorized_casts.q.out index c79d8d7..608e85a 100644 --- a/ql/src/test/results/clientpositive/vectorized_casts.q.out +++ b/ql/src/test/results/clientpositive/vectorized_casts.q.out @@ -196,8 +196,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] 
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true diff --git a/ql/src/test/results/clientpositive/vectorized_context.q.out b/ql/src/test/results/clientpositive/vectorized_context.q.out index 539223b..e322163 100644 --- a/ql/src/test/results/clientpositive/vectorized_context.q.out +++ b/ql/src/test/results/clientpositive/vectorized_context.q.out @@ -194,8 +194,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorized_date_funcs.q.out b/ql/src/test/results/clientpositive/vectorized_date_funcs.q.out index 688d0ed..84f9573 100644 --- a/ql/src/test/results/clientpositive/vectorized_date_funcs.q.out +++ b/ql/src/test/results/clientpositive/vectorized_date_funcs.q.out @@ -284,8 +284,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -572,8 +572,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -864,8 +864,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1134,8 +1134,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1258,8 +1258,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorized_mapjoin.q.out b/ql/src/test/results/clientpositive/vectorized_mapjoin.q.out index 3a1c0e7..51af71a 100644 --- a/ql/src/test/results/clientpositive/vectorized_mapjoin.q.out +++ b/ql/src/test/results/clientpositive/vectorized_mapjoin.q.out @@ -111,8 +111,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + 
inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorized_mapjoin2.q.out b/ql/src/test/results/clientpositive/vectorized_mapjoin2.q.out index e9a0e45..3b775a1 100644 --- a/ql/src/test/results/clientpositive/vectorized_mapjoin2.q.out +++ b/ql/src/test/results/clientpositive/vectorized_mapjoin2.q.out @@ -132,8 +132,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorized_mapjoin3.q.out b/ql/src/test/results/clientpositive/vectorized_mapjoin3.q.out index fb7198d..094c3ce 100644 --- a/ql/src/test/results/clientpositive/vectorized_mapjoin3.q.out +++ b/ql/src/test/results/clientpositive/vectorized_mapjoin3.q.out @@ -151,8 +151,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -325,8 +325,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -499,8 +499,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorized_math_funcs.q.out b/ql/src/test/results/clientpositive/vectorized_math_funcs.q.out index aea8f0a..9b96477 100644 --- a/ql/src/test/results/clientpositive/vectorized_math_funcs.q.out +++ b/ql/src/test/results/clientpositive/vectorized_math_funcs.q.out @@ -149,8 +149,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true diff --git a/ql/src/test/results/clientpositive/vectorized_string_funcs.q.out b/ql/src/test/results/clientpositive/vectorized_string_funcs.q.out index 77a91b9..5b48222 100644 --- a/ql/src/test/results/clientpositive/vectorized_string_funcs.q.out +++ b/ql/src/test/results/clientpositive/vectorized_string_funcs.q.out @@ -75,8 +75,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + 
inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorized_timestamp.q.out b/ql/src/test/results/clientpositive/vectorized_timestamp.q.out index 7e03bf3..f845873 100644 --- a/ql/src/test/results/clientpositive/vectorized_timestamp.q.out +++ b/ql/src/test/results/clientpositive/vectorized_timestamp.q.out @@ -153,8 +153,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -255,8 +255,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -340,8 +340,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -447,8 +447,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out b/ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out index 9f90e82..2291856 100644 --- a/ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out +++ b/ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out @@ -279,8 +279,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -472,8 +472,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -649,8 +649,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -826,8 +826,8 @@ 
STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -952,8 +952,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1059,8 +1059,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1179,8 +1179,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/vectorized_timestamp_ints_casts.q.out b/ql/src/test/results/clientpositive/vectorized_timestamp_ints_casts.q.out index f12a2b6..95f07d9 100644 --- a/ql/src/test/results/clientpositive/vectorized_timestamp_ints_casts.q.out +++ b/ql/src/test/results/clientpositive/vectorized_timestamp_ints_casts.q.out @@ -81,8 +81,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -241,8 +241,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true
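
Note on the recurring featureSupportInUse changes above: every ORC-backed plan in these golden files now reports DECIMAL_64 both as declared by the input format (inputFormatFeatureSupport) and as actually in use (featureSupportInUse). The sketch below is a minimal model of that two-step handshake, in plain Java; the enum and method names are hypothetical stand-ins, not Hive's API. It assumes the in-use set is simply the intersection of what the input format advertises and what the session permits, which is what the paired explain lines suggest.

    import java.util.EnumSet;

    // Illustrative only: models the "declared vs. in use" feature lines
    // printed in the explain plans above. All names are hypothetical.
    public class FeatureNegotiationSketch {

      // Hypothetical feature flags, mirroring the DECIMAL_64 value
      // that now appears in the plans.
      enum Support { DECIMAL_64 }

      // What the input format declares it can deliver.
      static EnumSet<Support> inputFormatFeatureSupport() {
        return EnumSet.of(Support.DECIMAL_64);
      }

      // What the session configuration permits (assumed: everything).
      static EnumSet<Support> enabledBySession() {
        return EnumSet.allOf(Support.class);
      }

      public static void main(String[] args) {
        EnumSet<Support> declared = inputFormatFeatureSupport();
        EnumSet<Support> inUse = EnumSet.copyOf(declared);
        inUse.retainAll(enabledBySession()); // intersection = features in use
        System.out.println("inputFormatFeatureSupport: " + declared);
        System.out.println("featureSupportInUse: " + inUse);
      }
    }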
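
The vectorized_case.q.out hunks also show what the feature buys: decimal(10,0) columns become decimal(10,0)/DECIMAL_64, and additions such as attr + 2 compile to Decimal64ColAddDecimal64Scalar carrying both a decimal64Val and a fallback decimalVal, with ConvertDecimal64ToDecimal widening the result when a downstream consumer needs the generic representation. The sketch below illustrates the underlying idea, not the Decimal64ColumnVector API (names and layout are assumptions): any decimal of precision 18 or less fits in a signed 64-bit long as an unscaled value, so per-row arithmetic over operands of equal scale is plain long arithmetic with no object allocation.

    // Illustrative sketch of decimal64 column-plus-scalar addition.
    // Assumes both operands share the same scale; class and method
    // names are hypothetical, not Hive's vectorized expression API.
    public class Decimal64AddSketch {

      // 18 decimal digits always fit in a signed 64-bit long.
      static final int MAX_DECIMAL64_PRECISION = 18;

      static boolean fitsDecimal64(int precision) {
        return precision <= MAX_DECIMAL64_PRECISION;
      }

      // decimal(10,0) value 7 is stored as the long 7;
      // decimal(10,2) value 7.25 would be stored as the long 725.
      static void addScalar(long[] unscaledCol, long unscaledScalar, int n) {
        for (int i = 0; i < n; i++) {
          unscaledCol[i] += unscaledScalar; // one long add per row
        }
      }

      public static void main(String[] args) {
        if (!fitsDecimal64(10)) {
          throw new IllegalStateException("decimal(10,0) must fit in a long");
        }
        long[] attr = {1, 2, 3};         // a decimal(10,0) column, scale 0
        addScalar(attr, 2, attr.length); // the "attr + 2" from the CASE expression
        for (long v : attr) {
          System.out.println(v);         // prints 3, 4, 5
        }
      }
    }

The extra decimal(11,0)/DECIMAL_64 and decimal(11,0) entries in the updated scratchColumnTypeNames lines exist for exactly this split: the narrow long-backed intermediate plus the widened scratch column that ConvertDecimal64ToDecimal fills.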