From a376a205d09d55bc9d0535e2bdb67a3b5c784cef Mon Sep 17 00:00:00 2001 From: Prasanth Jayachandran Date: Mon, 28 May 2018 12:41:17 -0700 Subject: [PATCH] HIVE-19629 : Enable Decimal64 reader after orc version upgrade (Prasanth J via Matt McCline) --- .../java/org/apache/hadoop/hive/conf/HiveConf.java | 2 + .../test/resources/testconfiguration.properties | 3 + .../hive/llap/io/api/impl/LlapInputFormat.java | 6 + .../io/decode/GenericColumnVectorProducer.java | 9 +- .../llap/io/decode/OrcEncodedDataConsumer.java | 40 +- .../hive/llap/io/encoded/OrcEncodedDataReader.java | 65 +- .../llap/io/encoded/SerDeEncodedDataReader.java | 3 + .../io/encoded/VectorDeserializeOrcWriter.java | 46 +- .../llap/io/metadata/ConsumerFileMetadata.java | 2 + .../hive/llap/io/metadata/OrcFileMetadata.java | 9 +- .../apache/hadoop/hive/ql/exec/FetchOperator.java | 4 +- .../vector/VectorizedInputFormatInterface.java | 1 + .../hive/ql/exec/vector/VectorizedRowBatchCtx.java | 5 + .../hadoop/hive/ql/io/NullRowsInputFormat.java | 6 + .../hadoop/hive/ql/io/orc/OrcInputFormat.java | 8 +- .../hadoop/hive/ql/io/orc/OrcRawRecordMerger.java | 22 +- .../org/apache/hadoop/hive/ql/io/orc/Reader.java | 12 +- .../apache/hadoop/hive/ql/io/orc/ReaderImpl.java | 9 +- .../hadoop/hive/ql/io/orc/RecordReaderImpl.java | 25 +- .../ql/io/orc/VectorizedOrcAcidRowBatchReader.java | 24 +- .../hive/ql/io/orc/VectorizedOrcInputFormat.java | 8 +- .../apache/hadoop/hive/ql/io/orc/WriterImpl.java | 22 +- .../io/orc/encoded/EncodedTreeReaderFactory.java | 190 +++- .../ql/io/parquet/MapredParquetInputFormat.java | 6 + .../hive/ql/optimizer/physical/Vectorizer.java | 20 +- .../hive/ql/io/orc/TestInputOutputFormat.java | 81 +- .../hive/ql/io/orc/TestOrcRawRecordMerger.java | 10 +- .../hive/ql/io/orc/TestVectorizedORCReader.java | 3 +- .../orc/TestVectorizedOrcAcidRowBatchReader.java | 2 +- ql/src/test/queries/clientpositive/llap_acid2.q | 31 +- .../queries/clientpositive/llap_decimal64_reader.q | 54 + .../queries/clientpositive/llap_uncompressed.q | 13 +- .../results/clientpositive/llap/llap_acid2.q.out | 302 ++++-- .../llap/llap_decimal64_reader.q.out | 303 ++++++ .../clientpositive/llap/llap_partitioned.q.out | 11 +- .../results/clientpositive/llap/llap_text.q.out | 1082 ++++++++++++++++++++ .../clientpositive/llap/llap_uncompressed.q.out | 283 +++++ .../results/clientpositive/llap/mergejoin.q.out | 116 +-- ...ma_evol_text_vec_part_all_complex_llap_io.q.out | 12 +- ..._evol_text_vec_part_all_primitive_llap_io.q.out | 19 +- .../llap/schema_evol_text_vec_part_llap_io.q.out | 27 +- .../llap/schema_evol_text_vec_table_llap_io.q.out | 42 +- .../llap/vector_char_varchar_1.q.out | 6 +- .../llap/vector_create_struct_table.q.out | 9 +- .../clientpositive/llap/vector_decimal_10_0.q.out | 15 +- .../llap/vector_decimal_aggregate.q.out | 32 +- .../clientpositive/llap/vector_decimal_cast.q.out | 7 +- .../llap/vector_decimal_precision.q.out | 7 +- .../clientpositive/llap/vector_decimal_round.q.out | 62 +- .../clientpositive/llap/vector_decimal_udf2.q.out | 38 +- .../clientpositive/llap/vector_groupby_cube1.q.out | 15 +- .../llap/vector_groupby_mapjoin.q.out | 6 +- .../llap/vector_groupby_sort_11.q.out | 12 +- .../llap/vector_groupby_sort_8.q.out | 3 +- .../clientpositive/llap/vector_llap_text_1.q.out | 6 +- .../clientpositive/llap/vector_map_order.q.out | 3 +- .../llap/vector_mapjoin_reduce.q.out | 18 +- .../clientpositive/llap/vector_null_map.q.out | 6 +- .../llap/vector_orc_merge_incompat_schema.q.out | 7 +- .../clientpositive/llap/vector_order_null.q.out | 33 
+- .../llap/vector_outer_reference_windowed.q.out | 176 ++-- .../results/clientpositive/llap/vector_ptf_1.q.out | 3 +- .../vector_reduce_groupby_duplicate_cols.q.out | 3 +- .../clientpositive/llap/vector_retry_failure.q.out | 3 +- .../clientpositive/llap/vector_windowing.q.out | 141 +-- .../llap/vector_windowing_expressions.q.out | 50 +- .../clientpositive/llap/vector_windowing_gby.q.out | 7 +- .../llap/vector_windowing_gby2.q.out | 16 +- .../llap/vector_windowing_multipartitioning.q.out | 42 +- .../llap/vector_windowing_navfn.q.out | 57 +- .../llap/vector_windowing_order_null.q.out | 56 +- .../llap/vector_windowing_range_multiorder.q.out | 77 +- .../llap/vector_windowing_rank.q.out | 70 +- .../llap/vector_windowing_streaming.q.out | 17 +- .../llap/vector_windowing_windowspec.q.out | 77 +- .../llap/vector_windowing_windowspec4.q.out | 3 +- .../llap/vectorized_bucketmapjoin1.q.out | 19 +- .../vectorized_dynamic_partition_pruning.q.out | 287 +++--- 78 files changed, 3181 insertions(+), 1146 deletions(-) create mode 100644 ql/src/test/queries/clientpositive/llap_decimal64_reader.q create mode 100644 ql/src/test/results/clientpositive/llap/llap_decimal64_reader.q.out create mode 100644 ql/src/test/results/clientpositive/llap/llap_text.q.out create mode 100644 ql/src/test/results/clientpositive/llap/llap_uncompressed.q.out diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index f48d0044b1..fecddc5a8f 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -3618,6 +3618,8 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal "internal use only. When false, don't suppress fatal exceptions like\n" + "NullPointerException, etc so the query will fail and assure it will be noticed", true), + HIVE_EXEC_ORC_USE_DECIMAL64_COLUMN_VECTORS("hive.exec.orc.use.decimal64.column.vectors", true, + "Whether ORC readers should make use of fast decimal64 column vectors when possible"), HIVE_TYPE_CHECK_ON_INSERT("hive.typecheck.on.insert", true, "This property has been extended to control " + "whether to check, convert, and normalize partition value to conform to its column type in " diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties index f7def351dd..6479a80dbc 100644 --- a/itests/src/test/resources/testconfiguration.properties +++ b/itests/src/test/resources/testconfiguration.properties @@ -553,6 +553,9 @@ minillaplocal.query.files=\ llap_partitioned.q,\ llap_smb.q,\ llap_vector_nohybridgrace.q,\ + llap_uncompressed.q,\ + llap_decimal64_reader.q,\ + llap_text.q,\ load_data_acid_rename.q,\ load_data_using_job.q,\ load_dyn_part5.q,\ diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java index 6d29163fbf..3c3f4a5a1f 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hive.llap.io.api.impl; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport; import org.apache.hadoop.hive.ql.io.BatchToRowInputFormat; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; @@ -229,4 +230,9 @@ static TableScanOperator findTsOp(MapWork mapWork) throws HiveException { } return 
tableScanOperator; } + + @Override + public VectorizedSupport.Support[] getSupportedFeatures() { + return new VectorizedSupport.Support[] {VectorizedSupport.Support.DECIMAL_64}; + } } diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java index 7af1b050ce..32f3beda54 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java @@ -31,13 +31,11 @@ import org.apache.hadoop.hive.llap.counters.QueryFragmentCounters; import org.apache.hadoop.hive.llap.io.api.impl.ColumnVectorBatch; import org.apache.hadoop.hive.llap.io.api.impl.LlapIoImpl; -import org.apache.hadoop.hive.llap.io.decode.ColumnVectorProducer.Includes; import org.apache.hadoop.hive.llap.io.encoded.SerDeEncodedDataReader; import org.apache.hadoop.hive.llap.io.metadata.ConsumerFileMetadata; import org.apache.hadoop.hive.llap.io.metadata.ConsumerStripeMetadata; import org.apache.hadoop.hive.llap.metrics.LlapDaemonCacheMetrics; import org.apache.hadoop.hive.llap.metrics.LlapDaemonIOMetrics; -import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx; import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat; import org.apache.hadoop.hive.ql.io.orc.encoded.Consumer; import org.apache.hadoop.hive.ql.io.orc.encoded.IoTrace; @@ -52,8 +50,8 @@ import org.apache.hadoop.mapred.Reporter; import org.apache.hive.common.util.FixedSizedObjectPool; import org.apache.orc.CompressionKind; +import org.apache.orc.OrcFile; import org.apache.orc.OrcProto; -import org.apache.orc.OrcUtils; import org.apache.orc.OrcProto.ColumnEncoding; import org.apache.orc.OrcProto.RowIndex; import org.apache.orc.OrcProto.RowIndexEntry; @@ -289,5 +287,10 @@ public CompressionKind getCompressionKind() { public TypeDescription getSchema() { return schema; } + + @Override + public OrcFile.Version getFileVersion() { + return null; + } } } diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java index feccb878b7..0d7435c5b7 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java @@ -33,6 +33,7 @@ import org.apache.hadoop.hive.llap.metrics.LlapDaemonIOMetrics; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ListColumnVector; @@ -73,10 +74,11 @@ private IoTrace trace; private final Includes includes; private TypeDescription[] batchSchemas; + private boolean useDecimal64ColumnVectors; public OrcEncodedDataConsumer( - Consumer consumer, Includes includes, boolean skipCorrupt, - QueryFragmentCounters counters, LlapDaemonIOMetrics ioMetrics) { + Consumer consumer, Includes includes, boolean skipCorrupt, + QueryFragmentCounters counters, LlapDaemonIOMetrics ioMetrics) { super(consumer, includes.getPhysicalColumnIds().size(), ioMetrics); this.includes = includes; // TODO: get rid of this @@ -84,6 +86,10 @@ public 
OrcEncodedDataConsumer( this.counters = counters; } + public void setUseDecimal64ColumnVectors(final boolean useDecimal64ColumnVectors) { + this.useDecimal64ColumnVectors = useDecimal64ColumnVectors; + } + public void setFileMetadata(ConsumerFileMetadata f) { assert fileMetadata == null; fileMetadata = f; @@ -153,7 +159,7 @@ protected void decodeBatch(OrcEncodedColumnBatch batch, if (cvb.cols[idx] == null) { // Orc store rows inside a root struct (hive writes it this way). // When we populate column vectors we skip over the root struct. - cvb.cols[idx] = createColumn(batchSchemas[idx], VectorizedRowBatch.DEFAULT_SIZE); + cvb.cols[idx] = createColumn(batchSchemas[idx], VectorizedRowBatch.DEFAULT_SIZE, useDecimal64ColumnVectors); } trace.logTreeReaderNextVector(idx); @@ -217,10 +223,10 @@ private void createColumnReaders(OrcEncodedColumnBatch batch, TreeReaderFactory.Context context = new TreeReaderFactory.ReaderContext() .setSchemaEvolution(evolution).skipCorrupt(skipCorrupt) .writerTimeZone(stripeMetadata.getWriterTimezone()) - ; + .fileFormat(fileMetadata == null ? null : fileMetadata.getFileVersion()); this.batchSchemas = includes.getBatchReaderTypes(fileSchema); StructTreeReader treeReader = EncodedTreeReaderFactory.createRootTreeReader( - batchSchemas, stripeMetadata.getEncodings(), batch, codec, context); + batchSchemas, stripeMetadata.getEncodings(), batch, codec, context, useDecimal64ColumnVectors); this.columnReaders = treeReader.getChildReaders(); if (LlapIoImpl.LOG.isDebugEnabled()) { @@ -232,7 +238,7 @@ private void createColumnReaders(OrcEncodedColumnBatch batch, positionInStreams(columnReaders, batch.getBatchKey(), stripeMetadata); } - private ColumnVector createColumn(TypeDescription type, int batchSize) { + private ColumnVector createColumn(TypeDescription type, int batchSize, final boolean useDecimal64ColumnVectors) { switch (type.getCategory()) { case BOOLEAN: case BYTE: @@ -252,30 +258,34 @@ private ColumnVector createColumn(TypeDescription type, int batchSize) { case TIMESTAMP: return new TimestampColumnVector(batchSize); case DECIMAL: - return new DecimalColumnVector(batchSize, type.getPrecision(), - type.getScale()); + if (useDecimal64ColumnVectors && type.getPrecision() <= TypeDescription.MAX_DECIMAL64_PRECISION) { + return new Decimal64ColumnVector(batchSize, type.getPrecision(), type.getScale()); + } else { + return new DecimalColumnVector(batchSize, type.getPrecision(), type.getScale()); + } case STRUCT: { List subtypeIdxs = type.getChildren(); ColumnVector[] fieldVector = new ColumnVector[subtypeIdxs.size()]; - for(int i = 0; i < fieldVector.length; ++i) { - fieldVector[i] = createColumn(subtypeIdxs.get(i), batchSize); + for (int i = 0; i < fieldVector.length; ++i) { + fieldVector[i] = createColumn(subtypeIdxs.get(i), batchSize, useDecimal64ColumnVectors); } return new StructColumnVector(batchSize, fieldVector); } case UNION: { List subtypeIdxs = type.getChildren(); ColumnVector[] fieldVector = new ColumnVector[subtypeIdxs.size()]; - for(int i=0; i < fieldVector.length; ++i) { - fieldVector[i] = createColumn(subtypeIdxs.get(i), batchSize); + for (int i = 0; i < fieldVector.length; ++i) { + fieldVector[i] = createColumn(subtypeIdxs.get(i), batchSize, useDecimal64ColumnVectors); } return new UnionColumnVector(batchSize, fieldVector); } case LIST: - return new ListColumnVector(batchSize, createColumn(type.getChildren().get(0), batchSize)); + return new ListColumnVector(batchSize, createColumn(type.getChildren().get(0), batchSize, + useDecimal64ColumnVectors)); case 
MAP: List subtypeIdxs = type.getChildren(); - return new MapColumnVector(batchSize, createColumn(subtypeIdxs.get(0), batchSize), - createColumn(subtypeIdxs.get(1), batchSize)); + return new MapColumnVector(batchSize, createColumn(subtypeIdxs.get(0), batchSize, useDecimal64ColumnVectors), + createColumn(subtypeIdxs.get(1), batchSize, useDecimal64ColumnVectors)); default: throw new IllegalArgumentException("LLAP does not support " + type.getCategory()); } diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java index 2947c167fc..f06ec67276 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java @@ -24,37 +24,17 @@ import java.util.Arrays; import java.util.List; -import org.apache.hadoop.hive.llap.counters.LlapIOCounters; -import org.apache.orc.CompressionCodec; -import org.apache.orc.OrcProto.BloomFilterIndex; -import org.apache.orc.OrcProto.FileTail; -import org.apache.orc.OrcProto.RowIndex; -import org.apache.orc.OrcProto.Stream; -import org.apache.orc.OrcProto.StripeStatistics; -import org.apache.orc.TypeDescription; -import org.apache.orc.impl.BufferChunk; -import org.apache.orc.impl.DataReaderProperties; -import org.apache.orc.impl.InStream; -import org.apache.orc.impl.OrcCodecPool; -import org.apache.orc.impl.OrcIndex; -import org.apache.orc.impl.OrcTail; -import org.apache.orc.impl.ReaderImpl; -import org.apache.orc.impl.SchemaEvolution; -import org.apache.orc.impl.WriterImpl; -import org.apache.tez.common.counters.TezCounters; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.Pool; import org.apache.hadoop.hive.common.Pool.PoolObjectHelper; +import org.apache.hadoop.hive.common.io.Allocator; import org.apache.hadoop.hive.common.io.Allocator.BufferObjectFactory; import org.apache.hadoop.hive.common.io.DataCache; -import org.apache.hadoop.hive.common.io.Allocator; -import org.apache.hadoop.hive.common.io.encoded.EncodedColumnBatch.ColumnStreamData; import org.apache.hadoop.hive.common.io.DiskRange; import org.apache.hadoop.hive.common.io.DiskRangeList; +import org.apache.hadoop.hive.common.io.encoded.EncodedColumnBatch.ColumnStreamData; import org.apache.hadoop.hive.common.io.encoded.MemoryBuffer; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; @@ -65,38 +45,58 @@ import org.apache.hadoop.hive.llap.cache.LlapDataBuffer; import org.apache.hadoop.hive.llap.cache.LowLevelCache; import org.apache.hadoop.hive.llap.cache.LowLevelCache.Priority; +import org.apache.hadoop.hive.llap.counters.LlapIOCounters; import org.apache.hadoop.hive.llap.counters.QueryFragmentCounters; import org.apache.hadoop.hive.llap.io.api.impl.LlapIoImpl; import org.apache.hadoop.hive.llap.io.decode.ColumnVectorProducer.Includes; import org.apache.hadoop.hive.llap.io.decode.ColumnVectorProducer.SchemaEvolutionFactory; import org.apache.hadoop.hive.llap.io.decode.OrcEncodedDataConsumer; -import org.apache.hadoop.hive.llap.io.metadata.OrcFileMetadata; import org.apache.hadoop.hive.llap.io.metadata.MetadataCache; import org.apache.hadoop.hive.llap.io.metadata.MetadataCache.LlapBufferOrBuffers; +import 
org.apache.hadoop.hive.llap.io.metadata.OrcFileMetadata; import org.apache.hadoop.hive.llap.io.metadata.OrcStripeMetadata; import org.apache.hadoop.hive.ql.io.HdfsUtils; -import org.apache.orc.CompressionKind; -import org.apache.orc.DataReader; import org.apache.hadoop.hive.ql.io.orc.OrcFile; import org.apache.hadoop.hive.ql.io.orc.OrcFile.ReaderOptions; -import org.apache.orc.OrcConf; import org.apache.hadoop.hive.ql.io.orc.OrcSplit; -import org.apache.hadoop.hive.ql.io.orc.encoded.Reader; import org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl; import org.apache.hadoop.hive.ql.io.orc.encoded.EncodedOrcFile; import org.apache.hadoop.hive.ql.io.orc.encoded.EncodedReader; import org.apache.hadoop.hive.ql.io.orc.encoded.IoTrace; import org.apache.hadoop.hive.ql.io.orc.encoded.OrcBatchKey; +import org.apache.hadoop.hive.ql.io.orc.encoded.Reader; import org.apache.hadoop.hive.ql.io.orc.encoded.Reader.OrcEncodedColumnBatch; import org.apache.hadoop.hive.ql.io.orc.encoded.Reader.PoolFactory; -import org.apache.orc.impl.RecordReaderUtils; -import org.apache.orc.StripeInformation; import org.apache.hadoop.hive.ql.io.sarg.SearchArgument; import org.apache.hadoop.mapred.FileSplit; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hive.common.util.FixedSizedObjectPool; +import org.apache.orc.CompressionCodec; +import org.apache.orc.CompressionKind; +import org.apache.orc.DataReader; +import org.apache.orc.OrcConf; import org.apache.orc.OrcProto; +import org.apache.orc.OrcProto.BloomFilterIndex; +import org.apache.orc.OrcProto.FileTail; +import org.apache.orc.OrcProto.RowIndex; +import org.apache.orc.OrcProto.Stream; +import org.apache.orc.OrcProto.StripeStatistics; +import org.apache.orc.StripeInformation; +import org.apache.orc.TypeDescription; +import org.apache.orc.impl.BufferChunk; +import org.apache.orc.impl.DataReaderProperties; +import org.apache.orc.impl.InStream; +import org.apache.orc.impl.OrcCodecPool; +import org.apache.orc.impl.OrcIndex; +import org.apache.orc.impl.OrcTail; +import org.apache.orc.impl.ReaderImpl; +import org.apache.orc.impl.RecordReaderUtils; +import org.apache.orc.impl.SchemaEvolution; +import org.apache.orc.impl.WriterImpl; import org.apache.tez.common.CallableWithNdc; +import org.apache.tez.common.counters.TezCounters; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.common.collect.Lists; @@ -231,6 +231,8 @@ public OrcEncodedDataReader(LowLevelCache lowLevelCache, BufferUsageManager buff this.jobConf = jobConf; // TODO: setFileMetadata could just create schema. Called in two places; clean up later. this.evolution = sef.createSchemaEvolution(fileMetadata.getSchema()); + consumer.setUseDecimal64ColumnVectors(HiveConf.getBoolVar(jobConf, + ConfVars.HIVE_EXEC_ORC_USE_DECIMAL64_COLUMN_VECTORS)); consumer.setFileMetadata(fileMetadata); consumer.setSchemaEvolution(evolution); } @@ -569,7 +571,8 @@ private OrcFileMetadata getFileFooterFromCacheOrDisk() throws IOException { stripes.add(new ReaderImpl.StripeInformationImpl(stripeProto)); } return new OrcFileMetadata( - fileKey, tail.getFooter(), tail.getPostscript(), stats, stripes); + fileKey, tail.getFooter(), tail.getPostscript(), stats, stripes, + ReaderImpl.getFileVersion(tail.getPostscript().getVersionList())); } finally { // We don't need the buffer anymore. 
metadataCache.decRefBuffer(tailBuffers); @@ -586,7 +589,7 @@ private OrcFileMetadata getFileFooterFromCacheOrDisk() throws IOException { } FileTail ft = orcReader.getFileTail(); return new OrcFileMetadata(fileKey, ft.getFooter(), ft.getPostscript(), - orcReader.getOrcProtoStripeStatistics(), orcReader.getStripes()); + orcReader.getOrcProtoStripeStatistics(), orcReader.getStripes(), orcReader.getFileVersion()); } private OrcProto.StripeFooter buildStripeFooter( diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/SerDeEncodedDataReader.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/SerDeEncodedDataReader.java index bed5887022..c35de0e76e 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/SerDeEncodedDataReader.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/SerDeEncodedDataReader.java @@ -221,6 +221,9 @@ public MemoryBuffer create() { this.sourceSerDe = sourceSerDe; this.reporter = reporter; this.jobConf = jobConf; + final boolean useDecimal64ColumnVectors = HiveConf.getBoolVar(jobConf, ConfVars + .HIVE_EXEC_ORC_USE_DECIMAL64_COLUMN_VECTORS); + consumer.setUseDecimal64ColumnVectors(useDecimal64ColumnVectors); this.schema = schema; this.writerIncludes = OrcInputFormat.genIncludedColumns(schema, columnIds); SchemaEvolution evolution = new SchemaEvolution(schema, null, diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/VectorDeserializeOrcWriter.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/VectorDeserializeOrcWriter.java index de19b1d78b..f1e51e8e15 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/VectorDeserializeOrcWriter.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/VectorDeserializeOrcWriter.java @@ -20,14 +20,19 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Properties; +import java.util.Set; import java.util.concurrent.ConcurrentLinkedQueue; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.llap.DebugUtils; @@ -35,10 +40,12 @@ import org.apache.hadoop.hive.llap.io.encoded.SerDeEncodedDataReader.CacheWriter; import org.apache.hadoop.hive.llap.io.encoded.SerDeEncodedDataReader.DeserializerOrcWriter; import org.apache.hadoop.hive.llap.io.encoded.SerDeEncodedDataReader.EncodingWriter; +import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorDeserializeRow; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport; import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils; import org.apache.hadoop.hive.ql.io.orc.Writer; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -46,14 +53,20 @@ import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.Deserializer; import org.apache.hadoop.hive.serde2.SerDeException; +import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; import 
org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters; import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe; import org.apache.hadoop.hive.serde2.lazy.fast.LazySimpleDeserializeRead; import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; import org.apache.hadoop.hive.serde2.objectinspector.StructField; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; +import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.io.BinaryComparable; import org.apache.hadoop.io.Writable; import org.apache.hadoop.mapred.InputFormat; @@ -121,12 +134,12 @@ private VectorDeserializeOrcWriter(Configuration conf, Properties tblProps, int allocSize) throws IOException { super(sourceOi, allocSize); // See also: the usage of VectorDeserializeType, for binary. For now, we only want text. - this.vrbCtx = createVrbCtx(sourceOi); + this.vrbCtx = createVrbCtx(sourceOi, tblProps); this.sourceIncludes = sourceIncludes; this.cacheIncludes = cacheIncludes; this.sourceBatch = vrbCtx.createVectorizedRowBatch(); deserializeRead = new LazySimpleDeserializeRead(vrbCtx.getRowColumnTypeInfos(), - /* useExternalBuffer */ true, createSerdeParams(conf, tblProps)); + vrbCtx.getRowdataTypePhysicalVariations(),/* useExternalBuffer */ true, createSerdeParams(conf, tblProps)); vectorDeserializeRow = new VectorDeserializeRow(deserializeRead); int colCount = vrbCtx.getRowColumnTypeInfos().length; boolean[] includes = null; @@ -192,13 +205,40 @@ public void startAsync(AsyncCallback callback) { this.orcThread.start(); } - private static VectorizedRowBatchCtx createVrbCtx(StructObjectInspector oi) throws IOException { + private static VectorizedRowBatchCtx createVrbCtx(StructObjectInspector oi, final Properties tblProps) throws IOException { + final String serde = tblProps.getProperty(serdeConstants.SERIALIZATION_LIB); + final String inputFormat = tblProps.getProperty(hive_metastoreConstants.FILE_INPUT_FORMAT); + final boolean isTextFormat = inputFormat != null && inputFormat.equals(TextInputFormat.class.getName()) && + serde != null && serde.equals(LazySimpleSerDe.class.getName()); + List dataTypePhysicalVariations = new ArrayList<>(); + if (isTextFormat) { + Set supportSet = new HashSet<>(Arrays.asList(VectorizedSupport.Support.values())); + StructTypeInfo structTypeInfo = (StructTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(oi); + int dataColumnCount = structTypeInfo.getAllStructFieldTypeInfos().size(); + final boolean isSupportDecimal64 = supportSet.contains(VectorizedSupport.Support.DECIMAL_64); + for (int i = 0; i < dataColumnCount; i++) { + DataTypePhysicalVariation dataTypePhysicalVariation = DataTypePhysicalVariation.NONE; + if (isSupportDecimal64) { + TypeInfo typeInfo = structTypeInfo.getAllStructFieldTypeInfos().get(i); + if (typeInfo instanceof DecimalTypeInfo) { + DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo; + if (HiveDecimalWritable.isPrecisionDecimal64(decimalTypeInfo.precision())) { + dataTypePhysicalVariation = DataTypePhysicalVariation.DECIMAL_64; + } + } + } + 
dataTypePhysicalVariations.add(dataTypePhysicalVariation); + } + } VectorizedRowBatchCtx vrbCtx = new VectorizedRowBatchCtx(); try { vrbCtx.init(oi, new String[0]); } catch (HiveException e) { throw new IOException(e); } + if (!dataTypePhysicalVariations.isEmpty()) { + vrbCtx.setRowDataTypePhysicalVariations(dataTypePhysicalVariations.toArray(new DataTypePhysicalVariation[0])); + } return vrbCtx; } diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerFileMetadata.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerFileMetadata.java index 89ad4aa8cd..d6b16efe29 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerFileMetadata.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerFileMetadata.java @@ -21,6 +21,7 @@ import org.apache.orc.CompressionKind; import org.apache.orc.FileFormatException; +import org.apache.orc.OrcFile; import org.apache.orc.OrcProto.Type; import org.apache.orc.TypeDescription; @@ -29,4 +30,5 @@ CompressionKind getCompressionKind(); List getTypes(); TypeDescription getSchema() throws FileFormatException; + OrcFile.Version getFileVersion(); } diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java index 5cd6f9fa2c..5eb713cc68 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java @@ -50,9 +50,10 @@ private final long contentLength; private final long numberOfRows; private final boolean isOriginalFormat; + private final OrcFile.Version fileVersion; public OrcFileMetadata(Object fileKey, OrcProto.Footer footer, OrcProto.PostScript ps, - List stats, List stripes) { + List stats, List stripes, final OrcFile.Version fileVersion) { this.stripeStats = stats; this.compressionKind = CompressionKind.valueOf(ps.getCompression().name()); this.compressionBufferSize = (int)ps.getCompressionBlockSize(); @@ -67,6 +68,7 @@ public OrcFileMetadata(Object fileKey, OrcProto.Footer footer, OrcProto.PostScri this.numberOfRows = footer.getNumberOfRows(); this.fileStats = footer.getStatisticsList(); this.fileKey = fileKey; + this.fileVersion = fileVersion; } // FileMetadata @@ -163,4 +165,9 @@ public int getStripeCount() { public TypeDescription getSchema() throws FileFormatException { return OrcUtils.convertTypeFromProtobuf(this.types, 0); } + + @Override + public OrcFile.Version getFileVersion() { + return fileVersion; + } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java index 224690109e..183fae5b9d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java @@ -214,8 +214,8 @@ public void setWork(FetchWork work) { private static final Map inputFormats = new HashMap(); @SuppressWarnings("unchecked") - static InputFormat getInputFormatFromCache( - Class inputFormatClass, JobConf conf) throws IOException { + public static InputFormat getInputFormatFromCache( + Class inputFormatClass, Configuration conf) throws IOException { if (Configurable.class.isAssignableFrom(inputFormatClass) || JobConfigurable.class.isAssignableFrom(inputFormatClass)) { return ReflectionUtil.newInstance(inputFormatClass, conf); diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedInputFormatInterface.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedInputFormatInterface.java index e74b185345..8ee59e4173 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedInputFormatInterface.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedInputFormatInterface.java @@ -24,4 +24,5 @@ */ public interface VectorizedInputFormatInterface { + VectorizedSupport.Support[] getSupportedFeatures(); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java index 6588385b9f..ffbfb6f89f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java @@ -163,6 +163,11 @@ public VectorizedRowBatchCtx( return rowDataTypePhysicalVariations; } + public void setRowDataTypePhysicalVariations( + final DataTypePhysicalVariation[] rowDataTypePhysicalVariations) { + this.rowDataTypePhysicalVariations = rowDataTypePhysicalVariations; + } + public int[] getDataColumnNums() { return dataColumnNums; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java index e632d43ee8..6434414df3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java @@ -29,6 +29,7 @@ import java.io.IOException; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.io.NullWritable; @@ -50,6 +51,11 @@ static final int MAX_ROW = 100; // to prevent infinite loop static final Logger LOG = LoggerFactory.getLogger(NullRowsRecordReader.class.getName()); + @Override + public VectorizedSupport.Support[] getSupportedFeatures() { + return null; + } + public static class DummyInputSplit extends FileSplit { @SuppressWarnings("unused") // Serialization ctor. 
private DummyInputSplit() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java index 31338d761e..9551a2a8dc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java @@ -60,6 +60,7 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizedInputFormatInterface; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport; import org.apache.hadoop.hive.ql.io.AcidInputFormat; import org.apache.hadoop.hive.ql.io.AcidOutputFormat; import org.apache.hadoop.hive.ql.io.AcidUtils; @@ -161,6 +162,11 @@ SelfDescribingInputFormatInterface, AcidInputFormat, CombineHiveInputFormat.AvoidSplitCombination, BatchToRowInputFormat { + @Override + public VectorizedSupport.Support[] getSupportedFeatures() { + return new VectorizedSupport.Support[] {VectorizedSupport.Support.DECIMAL_64}; + } + static enum SplitStrategyKind { HYBRID, BI, @@ -328,7 +334,7 @@ public static RecordReader createReaderFromFile(Reader file, List types = OrcUtils.getOrcTypes(schema); options.include(genIncludedColumns(schema, conf)); setSearchArgument(options, types, conf, isOriginal); - return file.rowsOptions(options); + return file.rowsOptions(options, conf); } public static boolean isOriginal(Reader file) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java index 8c7c72e056..d81921c6e6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java @@ -23,6 +23,7 @@ import java.util.TreeMap; import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; import org.apache.hadoop.hive.ql.exec.AbstractFileMergeOperator; import org.apache.hadoop.hive.ql.exec.Utilities; @@ -231,16 +232,17 @@ public String toString() { * @param maxKey only return keys less than or equal to maxKey if it is * non-null * @param options options to provide to read the rows. + * @param conf the configuration * @throws IOException */ @VisibleForTesting ReaderPairAcid(ReaderKey key, Reader reader, - RecordIdentifier minKey, RecordIdentifier maxKey, - ReaderImpl.Options options) throws IOException { + RecordIdentifier minKey, RecordIdentifier maxKey, + ReaderImpl.Options options, final Configuration conf) throws IOException { this.reader = reader; this.key = key; // TODO use stripe statistics to jump over stripes - recordReader = reader.rowsOptions(options); + recordReader = reader.rowsOptions(options, conf); this.minKey = minKey; this.maxKey = maxKey; // advance the reader until we reach the minimum key @@ -437,7 +439,7 @@ static int encodeBucketId(Configuration conf, int bucketId, int statementId) { RecordIdentifier newMinKey = minKey; RecordIdentifier newMaxKey = maxKey; - recordReader = reader.rowsOptions(options); + recordReader = reader.rowsOptions(options, conf); /** * Logically each bucket consists of 0000_0, 0000_0_copy_1... 0000_0_copy_N. etc We don't * know N a priori so if this is true, then the current split is from 0000_0_copy_N file.
@@ -586,7 +588,7 @@ public void next(OrcStruct next) throws IOException { throw new IllegalStateException("No 'original' files found for bucketId=" + this.bucketId + " in " + mergerOptions.getRootPath()); } - recordReader = getReader().rowsOptions(options); + recordReader = getReader().rowsOptions(options, conf); next(nextRecord());//load 1st row } @Override public RecordReader getRecordReader() { @@ -620,7 +622,7 @@ public void next(OrcStruct next) throws IOException { nextRecord = null; return; } - recordReader = reader.rowsOptions(options); + recordReader = reader.rowsOptions(options, conf); } } } @@ -1040,7 +1042,7 @@ public Options clone() { //required (on Tez) that base_x/ doesn't have a file for 'bucket' reader = OrcFile.createReader(bucketPath, OrcFile.readerOptions(conf)); pair = new ReaderPairAcid(key, reader, keyInterval.getMinKey(), keyInterval.getMaxKey(), - eventOptions); + eventOptions, conf); } else { pair = new EmptyReaderPair(); @@ -1050,7 +1052,7 @@ public Options clone() { else { assert reader != null : "no reader? " + mergerOptions.getRootPath(); pair = new ReaderPairAcid(key, reader, keyInterval.getMinKey(), keyInterval.getMaxKey(), - eventOptions); + eventOptions, conf); } } minKey = pair.getMinKey(); @@ -1107,7 +1109,7 @@ public Options clone() { //HIVE-17320: we should compute a SARG to push down min/max key to delete_delta Reader deltaReader = OrcFile.createReader(deltaFile, OrcFile.readerOptions(conf)); ReaderPair deltaPair = new ReaderPairAcid(key, deltaReader, minKey, maxKey, - deltaEventOptions); + deltaEventOptions, conf); if (deltaPair.nextRecord() != null) { readers.put(key, deltaPair); } @@ -1121,7 +1123,7 @@ public Options clone() { assert length >= 0; Reader deltaReader = OrcFile.createReader(deltaFile, OrcFile.readerOptions(conf).maxLength(length)); //must get statementId from file name since Acid 1.0 doesn't write it into bucketProperty - ReaderPairAcid deltaPair = new ReaderPairAcid(key, deltaReader, minKey, maxKey, deltaEventOptions); + ReaderPairAcid deltaPair = new ReaderPairAcid(key, deltaReader, minKey, maxKey, deltaEventOptions, conf); if (deltaPair.nextRecord() != null) { readers.put(key, deltaPair); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/Reader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/Reader.java index 7485e60749..8fd9b909fc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/Reader.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/Reader.java @@ -20,6 +20,7 @@ import java.io.IOException; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.io.sarg.SearchArgument; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; @@ -55,7 +56,16 @@ * @throws IOException */ RecordReader rowsOptions(Options options) throws IOException; - + + /** + * Create a RecordReader that reads everything with the given options. + * @param options the options to use + * @param conf the configuration; used to decide whether decimal64 column vectors are enabled + * @return a new RecordReader + * @throws IOException + */ + RecordReader rowsOptions(Options options, Configuration conf) throws IOException; + /** * Create a RecordReader that will scan the entire file. * This is a legacy method and rowsOptions is preferred.
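[Editor's sketch, not part of the diff: how a caller is expected to use the new conf-aware rowsOptions overload. The driver class, file path, and no-argument Options constructor are illustrative assumptions; the flag name and the createRowBatchV2() behavior come from the RecordReaderImpl change below.]

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.ql.io.orc.OrcFile;
    import org.apache.hadoop.hive.ql.io.orc.Reader;
    import org.apache.hadoop.hive.ql.io.orc.RecordReader;

    public class Decimal64ReadSketch { // hypothetical driver, for illustration only
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // Flag introduced by this patch; defaults to true.
        conf.setBoolean("hive.exec.orc.use.decimal64.column.vectors", true);
        Reader reader = OrcFile.createReader(
            new Path("/tmp/decimal_test.orc"), // hypothetical file
            OrcFile.readerOptions(conf));
        // Passing conf makes RecordReaderImpl build its internal batch with
        // createRowBatchV2(), so decimals of precision <= 18 are decoded into
        // Decimal64ColumnVector instead of DecimalColumnVector.
        RecordReader rows = reader.rowsOptions(new Reader.Options(), conf);
        rows.close();
      }
    }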
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java index 1a6db1f2c2..171b02b77c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java @@ -21,6 +21,7 @@ import java.io.IOException; import java.nio.ByteBuffer; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.io.sarg.SearchArgument; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; @@ -73,11 +74,17 @@ public RecordReader rows() throws IOException { @Override public RecordReader rowsOptions(Options options) throws IOException { + return rowsOptions(options, null); + } + + @Override + public RecordReader rowsOptions(Options options, Configuration conf) throws IOException { LOG.info("Reading ORC rows from " + path + " with " + options); - return new RecordReaderImpl(this, options); + return new RecordReaderImpl(this, options, conf); } + @Override public RecordReader rows(boolean[] include) throws IOException { return rowsOptions(new Options().include(include)); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java index 5b001a0bbc..e06806831a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java @@ -23,8 +23,11 @@ import java.util.List; import java.util.Map; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ListColumnVector; @@ -48,6 +51,7 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; +import org.apache.orc.OrcFile; import org.apache.orc.TypeDescription; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -60,9 +64,15 @@ private long baseRow; protected RecordReaderImpl(ReaderImpl fileReader, - Reader.Options options) throws IOException { + Reader.Options options, final Configuration conf) throws IOException { super(fileReader, options); - batch = this.schema.createRowBatch(); + final boolean useDecimal64ColumnVectors = conf != null && HiveConf.getBoolVar(conf, + HiveConf.ConfVars.HIVE_EXEC_ORC_USE_DECIMAL64_COLUMN_VECTORS); + if (useDecimal64ColumnVectors) { + batch = this.schema.createRowBatchV2(); + } else { + batch = this.schema.createRowBatch(); + } rowInBatch = 0; } @@ -80,8 +90,8 @@ boolean ensureBatch() throws IOException { return true; } - public VectorizedRowBatch createRowBatch() { - return this.schema.createRowBatch(); + public VectorizedRowBatch createRowBatch(boolean useDecimal64) { + return useDecimal64 ?
this.schema.createRowBatchV2() : this.schema.createRowBatch(); } @Override @@ -393,7 +403,12 @@ static HiveDecimalWritable nextDecimal(ColumnVector vector, } else { result = (HiveDecimalWritable) previous; } - result.set(((DecimalColumnVector) vector).vector[row]); + if (vector instanceof Decimal64ColumnVector) { + long value = ((Decimal64ColumnVector) vector).vector[row]; + result.deserialize64(value, ((Decimal64ColumnVector) vector).scale); + } else { + result.set(((DecimalColumnVector) vector).vector[row]); + } return result; } else { return null; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcAcidRowBatchReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcAcidRowBatchReader.java index d2e1a68312..8f5ecf0a81 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcAcidRowBatchReader.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcAcidRowBatchReader.java @@ -110,7 +110,7 @@ final Reader reader = OrcInputFormat.createOrcReaderForSplit(conf, (OrcSplit) inputSplit); // Careful with the range here now, we do not want to read the whole base file like deltas. - innerReader = reader.rowsOptions(readerOptions.range(offset, length)); + innerReader = reader.rowsOptions(readerOptions.range(offset, length), conf); baseReader = new org.apache.hadoop.mapred.RecordReader() { @Override @@ -143,7 +143,13 @@ public float getProgress() throws IOException { return innerReader.getProgress(); } }; - this.vectorizedRowBatchBase = ((RecordReaderImpl) innerReader).createRowBatch(); + final boolean useDecimal64ColumnVectors = HiveConf + .getBoolVar(conf, ConfVars.HIVE_EXEC_ORC_USE_DECIMAL64_COLUMN_VECTORS); + if (useDecimal64ColumnVectors) { + this.vectorizedRowBatchBase = ((RecordReaderImpl) innerReader).createRowBatch(true); + } else { + this.vectorizedRowBatchBase = ((RecordReaderImpl) innerReader).createRowBatch(false); + } } /** @@ -859,10 +865,16 @@ public String toString() { private final boolean isBucketedTable; DeleteReaderValue(Reader deleteDeltaReader, Reader.Options readerOptions, int bucket, - ValidWriteIdList validWriteIdList, boolean isBucketedTable) throws IOException { - this.recordReader = deleteDeltaReader.rowsOptions(readerOptions); + ValidWriteIdList validWriteIdList, boolean isBucketedTable, final JobConf conf) throws IOException { + this.recordReader = deleteDeltaReader.rowsOptions(readerOptions, conf); this.bucketForSplit = bucket; - this.batch = deleteDeltaReader.getSchema().createRowBatch(); + final boolean useDecimal64ColumnVector = HiveConf.getBoolVar(conf, ConfVars + .HIVE_EXEC_ORC_USE_DECIMAL64_COLUMN_VECTORS); + if (useDecimal64ColumnVector) { + this.batch = deleteDeltaReader.getSchema().createRowBatchV2(); + } else { + this.batch = deleteDeltaReader.getSchema().createRowBatch(); + } if (!recordReader.nextBatch(batch)) { // Read the first batch. this.batch = null; // Oh! the first batch itself was null. Close the reader. 
} @@ -1054,7 +1066,7 @@ public int compareTo(CompressedOwid other) { throw new DeleteEventsOverflowMemoryException(); } DeleteReaderValue deleteReaderValue = new DeleteReaderValue(deleteDeltaReader, - readerOptions, bucket, validWriteIdList, isBucketedTable); + readerOptions, bucket, validWriteIdList, isBucketedTable, conf); DeleteRecordKey deleteRecordKey = new DeleteRecordKey(); if (deleteReaderValue.next(deleteRecordKey)) { sortMerger.put(deleteRecordKey, deleteReaderValue); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcInputFormat.java index c581bba1c4..892fcc0843 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcInputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcInputFormat.java @@ -31,6 +31,7 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizedInputFormatInterface; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport; import org.apache.hadoop.hive.ql.io.AcidUtils; import org.apache.hadoop.hive.ql.io.InputFormatChecker; import org.apache.hadoop.hive.ql.io.SelfDescribingInputFormatInterface; @@ -99,7 +100,7 @@ options.include(OrcInputFormat.genIncludedColumns(schema, conf)); OrcInputFormat.setSearchArgument(options, types, conf, true); - this.reader = file.rowsOptions(options); + this.reader = file.rowsOptions(options, conf); int partitionColumnCount = rbCtx.getPartitionColumnCount(); if (partitionColumnCount > 0) { @@ -204,4 +205,9 @@ public boolean validateInput(FileSystem fs, HiveConf conf, } return true; } + + @Override + public VectorizedSupport.Support[] getSupportedFeatures() { + return new VectorizedSupport.Support[] {VectorizedSupport.Support.DECIMAL_64}; + } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java index 71682af364..b0102cc97f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java @@ -24,6 +24,8 @@ import java.util.Map; import java.util.Set; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -63,6 +65,7 @@ import com.google.common.annotations.VisibleForTesting; import org.apache.orc.PhysicalWriter; +import org.apache.orc.TypeDescription; /** * An ORC file writer. 
The file is divided into stripes, which is the natural @@ -93,7 +96,14 @@ OrcFile.WriterOptions opts) throws IOException { super(fs, path, opts); this.inspector = opts.getInspector(); - this.internalBatch = opts.getSchema().createRowBatch(opts.getBatchSize()); + boolean useDecimal64ColumnVectors = opts.getConfiguration() != null && + HiveConf.getBoolVar(opts.getConfiguration(), HiveConf.ConfVars.HIVE_EXEC_ORC_USE_DECIMAL64_COLUMN_VECTORS); + if (useDecimal64ColumnVectors) { + this.internalBatch = opts.getSchema().createRowBatch(TypeDescription.RowBatchVersion.USE_DECIMAL64, + opts.getBatchSize()); + } else { + this.internalBatch = opts.getSchema().createRowBatch(opts.getBatchSize()); + } this.fields = initializeFieldsFromOi(inspector); } @@ -207,9 +217,15 @@ static void setColumn(int rowId, ColumnVector column, break; } case DECIMAL: { - DecimalColumnVector vector = (DecimalColumnVector) column; - vector.set(rowId, ((HiveDecimalObjectInspector) inspector) + if (column instanceof Decimal64ColumnVector) { + Decimal64ColumnVector vector = (Decimal64ColumnVector) column; + vector.set(rowId, ((HiveDecimalObjectInspector) inspector) + .getPrimitiveWritableObject(obj)); + } else { + DecimalColumnVector vector = (DecimalColumnVector) column; + vector.set(rowId, ((HiveDecimalObjectInspector) inspector) .getPrimitiveWritableObject(obj)); + } break; } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedTreeReaderFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedTreeReaderFactory.java index c9078be208..3dc9079b0f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedTreeReaderFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedTreeReaderFactory.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.hive.ql.io.orc.encoded; +import org.apache.orc.OrcFile; import org.apache.orc.impl.RunLengthByteReader; import java.io.IOException; @@ -1200,6 +1201,147 @@ public static StreamReaderBuilder builder() { } } + protected static class Decimal64StreamReader extends Decimal64TreeReader implements SettableTreeReader { + private boolean _isFileCompressed; + private SettableUncompressedStream _presentStream; + private SettableUncompressedStream _valueStream; + private List vectors; + private int vectorIndex = 0; + + private Decimal64StreamReader(int columnId, int precision, int scale, + SettableUncompressedStream presentStream, + SettableUncompressedStream valueStream, + boolean isFileCompressed, + OrcProto.ColumnEncoding encoding, TreeReaderFactory.Context context, + List vectors) throws IOException { + super(columnId, presentStream, valueStream, encoding, + precision, scale, context); + this._isFileCompressed = isFileCompressed; + this._presentStream = presentStream; + this._valueStream = valueStream; + this.vectors = vectors; + } + + @Override + public void seek(PositionProvider index) throws IOException { + if (vectors != null) return; + if (present != null) { + if (_isFileCompressed) { + index.getNext(); + } + present.seek(index); + } + + // data stream could be empty stream or already reached end of stream before present stream. + // This can happen if all values in stream are nulls or last row group values are all null. 
+ skipCompressedIndex(_isFileCompressed, index); + if (_valueStream.available() > 0) { + valueReader.seek(index); + } else { + skipSeek(index); + } + } + + @Override + public void nextVector( + ColumnVector previousVector, boolean[] isNull, int batchSize) throws IOException { + if (vectors == null) { + super.nextVector(previousVector, isNull, batchSize); + return; + } + vectors.get(vectorIndex++).shallowCopyTo(previousVector); + if (vectorIndex == vectors.size()) { + vectors = null; + } + } + + @Override + public void setBuffers(EncodedColumnBatch batch, boolean sameStripe) { + assert vectors == null; // See the comment in TimestampStreamReader.setBuffers. + ColumnStreamData[] streamsData = batch.getColumnData(columnId); + if (_presentStream != null) { + _presentStream.setBuffers(StreamUtils.createDiskRangeInfo(streamsData[OrcProto.Stream.Kind.PRESENT_VALUE])); + } + if (_valueStream != null) { + _valueStream.setBuffers(StreamUtils.createDiskRangeInfo(streamsData[OrcProto.Stream.Kind.DATA_VALUE])); + } + } + + public static class StreamReaderBuilder { + private int columnIndex; + private ColumnStreamData presentStream; + private ColumnStreamData valueStream; + private int scale; + private int precision; + private CompressionCodec compressionCodec; + private OrcProto.ColumnEncoding columnEncoding; + private List vectors; + private TreeReaderFactory.Context context; + + public StreamReaderBuilder setColumnIndex(int columnIndex) { + this.columnIndex = columnIndex; + return this; + } + + public StreamReaderBuilder setPrecision(int precision) { + this.precision = precision; + return this; + } + + public StreamReaderBuilder setScale(int scale) { + this.scale = scale; + return this; + } + + public StreamReaderBuilder setContext(TreeReaderFactory.Context context) { + this.context = context; + return this; + } + + public StreamReaderBuilder setPresentStream(ColumnStreamData presentStream) { + this.presentStream = presentStream; + return this; + } + + public StreamReaderBuilder setValueStream(ColumnStreamData valueStream) { + this.valueStream = valueStream; + return this; + } + + + public StreamReaderBuilder setCompressionCodec(CompressionCodec compressionCodec) { + this.compressionCodec = compressionCodec; + return this; + } + + public StreamReaderBuilder setColumnEncoding(OrcProto.ColumnEncoding encoding) { + this.columnEncoding = encoding; + return this; + } + + public Decimal64StreamReader build() throws IOException { + SettableUncompressedStream presentInStream = StreamUtils.createSettableUncompressedStream( + OrcProto.Stream.Kind.PRESENT.name(), presentStream); + + SettableUncompressedStream valueInStream = StreamUtils.createSettableUncompressedStream( + OrcProto.Stream.Kind.DATA.name(), valueStream); + + boolean isFileCompressed = compressionCodec != null; + return new Decimal64StreamReader(columnIndex, precision, scale, presentInStream, + valueInStream, isFileCompressed, columnEncoding, context, vectors); + } + + public StreamReaderBuilder setVectors(List vectors) { + this.vectors = vectors; + return this; + } + } + + public static StreamReaderBuilder builder() { + return new StreamReaderBuilder(); + } + } + protected static class DateStreamReader extends DateTreeReader implements SettableTreeReader { private boolean isFileCompressed; private SettableUncompressedStream _presentStream; @@ -2101,8 +2243,8 @@ public static StreamReaderBuilder builder() { } public static StructTreeReader createRootTreeReader(TypeDescription[] batchSchemas, - List encodings, OrcEncodedColumnBatch batch, - 
CompressionCodec codec, TreeReaderFactory.Context context) throws IOException { + List encodings, OrcEncodedColumnBatch batch, + CompressionCodec codec, Context context, final boolean useDecimal64ColumnVectors) throws IOException { // Note: we only look at the schema here to deal with complex types. Somebody has set up the // reader with whatever ideas they had to the schema and we just trust the reader to // produce the CVBs that was asked for. However, we only need to look at top level columns. @@ -2117,7 +2259,8 @@ public static StructTreeReader createRootTreeReader(TypeDescription[] batchSchem if (!batch.hasData(batchColIx) && !batch.hasVectors(batchColIx)) { throw new AssertionError("No data for column " + batchColIx + ": " + batchSchemas[i]); } - childReaders[i] = createEncodedTreeReader(batchSchemas[i], encodings, batch, codec, context); + childReaders[i] = createEncodedTreeReader(batchSchemas[i], encodings, batch, codec, context, + useDecimal64ColumnVectors); } // TODO: do we actually need this reader? the caller just extracts child readers. @@ -2138,8 +2281,8 @@ private static void skipSeek(PositionProvider index) { private static TreeReader createEncodedTreeReader(TypeDescription schema, - List encodings, OrcEncodedColumnBatch batch, - CompressionCodec codec, TreeReaderFactory.Context context) throws IOException { + List encodings, OrcEncodedColumnBatch batch, + CompressionCodec codec, Context context, final boolean useDecimal64ColumnVectors) throws IOException { int columnIndex = schema.getId(); ColumnStreamData[] streamBuffers = null; List vectors = null; @@ -2200,12 +2343,12 @@ private static TreeReader createEncodedTreeReader(TypeDescription schema, case TIMESTAMP: case DATE: return getPrimitiveTreeReader(columnIndex, schema, codec, columnEncoding, - present, data, dictionary, lengths, secondary, context, vectors); + present, data, dictionary, lengths, secondary, context, vectors, useDecimal64ColumnVectors); case LIST: assert vectors == null; // Not currently supported. 
       TypeDescription elementType = schema.getChildren().get(0);
       TreeReader elementReader = createEncodedTreeReader(
-          elementType, encodings, batch, codec, context);
+          elementType, encodings, batch, codec, context, useDecimal64ColumnVectors);
       return ListStreamReader.builder()
           .setColumnIndex(columnIndex)
           .setColumnEncoding(columnEncoding)
@@ -2220,9 +2363,9 @@ private static TreeReader createEncodedTreeReader(TypeDescription schema,
       TypeDescription keyType = schema.getChildren().get(0);
       TypeDescription valueType = schema.getChildren().get(1);
       TreeReader keyReader = createEncodedTreeReader(
-          keyType, encodings, batch, codec, context);
+          keyType, encodings, batch, codec, context, useDecimal64ColumnVectors);
       TreeReader valueReader = createEncodedTreeReader(
-          valueType, encodings, batch, codec, context);
+          valueType, encodings, batch, codec, context, useDecimal64ColumnVectors);
       return MapStreamReader.builder()
           .setColumnIndex(columnIndex)
           .setColumnEncoding(columnEncoding)
@@ -2240,7 +2383,7 @@ private static TreeReader createEncodedTreeReader(TypeDescription schema,
       for (int i = 0; i < childCount; i++) {
         TypeDescription childType = schema.getChildren().get(i);
         childReaders[i] = createEncodedTreeReader(
-            childType, encodings, batch, codec, context);
+            childType, encodings, batch, codec, context, useDecimal64ColumnVectors);
       }
       return StructStreamReader.builder()
           .setColumnIndex(columnIndex)
@@ -2258,7 +2401,7 @@ private static TreeReader createEncodedTreeReader(TypeDescription schema,
       for (int i = 0; i < childCount; i++) {
         TypeDescription childType = schema.getChildren().get(i);
         childReaders[i] = createEncodedTreeReader(
-            childType, encodings, batch, codec, context);
+            childType, encodings, batch, codec, context, useDecimal64ColumnVectors);
       }
       return UnionStreamReader.builder()
           .setColumnIndex(columnIndex)
@@ -2276,10 +2419,10 @@ private static TreeReader createEncodedTreeReader(TypeDescription schema,
   }
 
   private static TreeReader getPrimitiveTreeReader(final int columnIndex,
-      TypeDescription columnType, CompressionCodec codec, OrcProto.ColumnEncoding columnEncoding,
-      ColumnStreamData present, ColumnStreamData data, ColumnStreamData dictionary,
-      ColumnStreamData lengths, ColumnStreamData secondary, TreeReaderFactory.Context context,
-      List<ColumnVector> vectors) throws IOException {
+      TypeDescription columnType, CompressionCodec codec, OrcProto.ColumnEncoding columnEncoding,
+      ColumnStreamData present, ColumnStreamData data, ColumnStreamData dictionary,
+      ColumnStreamData lengths, ColumnStreamData secondary, Context context,
+      List<ColumnVector> vectors, final boolean useDecimal64ColumnVectors) throws IOException {
     switch (columnType.getCategory()) {
     case BINARY:
       return BinaryStreamReader.builder()
@@ -2390,7 +2533,21 @@ private static TreeReader getPrimitiveTreeReader(final int columnIndex,
     case DECIMAL:
-      return DecimalStreamReader.builder()
+      if ((context.getFileFormat() == OrcFile.Version.UNSTABLE_PRE_2_0 &&
+          columnType.getPrecision() <= TypeDescription.MAX_DECIMAL64_PRECISION)) {
+        return Decimal64StreamReader.builder()
+            .setColumnIndex(columnIndex)
+            .setPrecision(columnType.getPrecision())
+            .setScale(columnType.getScale())
+            .setPresentStream(present)
+            .setValueStream(data)
+            .setCompressionCodec(codec)
+            .setColumnEncoding(columnEncoding)
+            .setVectors(vectors)
+            .setContext(context)
+            .build();
+      } else {
+        return DecimalStreamReader.builder()
           .setColumnIndex(columnIndex)
           .setPrecision(columnType.getPrecision())
          .setScale(columnType.getScale())
@@ -2402,6 +2559,7 @@ private static TreeReader getPrimitiveTreeReader(final int columnIndex,
           .setVectors(vectors)
           .setContext(context)
           .build();
+      }
     case TIMESTAMP:
       return TimestampStreamReader.builder()
           .setColumnIndex(columnIndex)
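The DECIMAL case above carries the core decision of this patch: the fast Decimal64StreamReader is chosen only when the file was written with the post-upgrade ORC format and the declared precision fits in a signed 64-bit long. The rule, distilled into a standalone predicate for illustration (the names come from the diff above; MAX_DECIMAL64_PRECISION is 18):

import org.apache.orc.OrcFile;
import org.apache.orc.TypeDescription;
import org.apache.orc.impl.TreeReaderFactory;

final class Decimal64Eligibility {
  // Mirrors the condition in getPrimitiveTreeReader's DECIMAL case: only
  // UNSTABLE_PRE_2_0 files with precision <= 18 take the decimal64 path;
  // everything else falls back to DecimalStreamReader.
  static boolean canUseDecimal64(TreeReaderFactory.Context context, TypeDescription type) {
    return context.getFileFormat() == OrcFile.Version.UNSTABLE_PRE_2_0
        && type.getPrecision() <= TypeDescription.MAX_DECIMAL64_PRECISION;
  }
}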
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java
index ed6d577f8d..5e70a05d20 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java
@@ -25,6 +25,7 @@ import org.apache.hadoop.hive.common.io.FileMetadataCache;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedInputFormatInterface;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.io.InputFormatChecker;
 import org.apache.hadoop.hive.ql.io.LlapCacheOnlyInputFormatInterface;
@@ -115,4 +116,9 @@ public boolean validateInput(FileSystem fs, HiveConf conf, List<FileStatus> file
     return true;
   }
+
+  @Override
+  public VectorizedSupport.Support[] getSupportedFeatures() {
+    return null;
+  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
index 394f826508..a9cbdc9368 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
@@ -20,6 +20,7 @@
 import static org.apache.hadoop.hive.ql.plan.ReduceSinkDesc.ReducerTraits.UNIFORM;
 
+import java.io.IOException;
 import java.io.Serializable;
 import java.lang.annotation.Annotation;
 import java.util.ArrayList;
@@ -40,6 +41,7 @@
 import org.apache.commons.lang.ArrayUtils;
 import org.apache.commons.lang3.tuple.ImmutablePair;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedInputFormatInterface;
 import org.apache.hadoop.hive.ql.exec.vector.reducesink.*;
 import org.apache.hadoop.hive.ql.io.AcidUtils;
 import org.apache.hadoop.hive.ql.parse.spark.SparkPartitionPruningSinkOperator;
@@ -1207,6 +1209,14 @@ private void determineDataColumnNums(TableScanOperator tableScanOperator,
   private Support[] getVectorizedInputFormatSupports(
       Class<? extends InputFormat> inputFileFormatClass) {
+    try {
+      InputFormat inputFormat = FetchOperator.getInputFormatFromCache(inputFileFormatClass, hiveConf);
+      if (inputFormat instanceof VectorizedInputFormatInterface) {
+        return ((VectorizedInputFormatInterface) inputFormat).getSupportedFeatures();
+      }
+    } catch (IOException e) {
+      LOG.error("Unable to instantiate {} input format class. Cannot determine vectorization support.",
+          inputFileFormatClass.getName(), e);
+    }
     // FUTURE: Decide how to ask an input file format what vectorization features it supports.
     return null;
   }
@@ -1830,11 +1840,12 @@ private void validateAndVectorizeMapWork(MapWork mapWork, VectorTaskColumnInfo v
       supportRemovedReasons.add(removeString);
     }
 
-    // And, if LLAP is enabled for now, disable DECIMAL_64;
-    if (isLlapIoEnabled && supportSet.contains(Support.DECIMAL_64)) {
+    // Disable DECIMAL_64 if requested via config.
+    if (!hiveConf.getBoolVar(ConfVars.HIVE_EXEC_ORC_USE_DECIMAL64_COLUMN_VECTORS) &&
+        supportSet.contains(Support.DECIMAL_64)) {
       supportSet.remove(Support.DECIMAL_64);
       String removeString =
-          "DECIMAL_64 disabled because LLAP is enabled";
+          "DECIMAL_64 disabled as hive.exec.orc.use.decimal64.column.vectors is set to false";
       supportRemovedReasons.add(removeString);
     }
@@ -4386,6 +4397,9 @@ private boolean usesVectorUDFAdaptor(VectorExpression[] vecExprs) {
     if (index < size) {
       vectorSelectExprs = Arrays.copyOf(vectorSelectExprs, index);
     }
+    // If the parent output type is NONE and both children are decimal types,
+    // one DECIMAL_64 and the other NONE, insert a cast for the DECIMAL_64 child.
+    VectorExpression[] vse = vContext.getVectorExpressionsUpConvertDecimal64(colList);
     vectorSelectDesc.setSelectExpressions(vectorSelectExprs);
     vectorSelectDesc.setProjectedOutputColumns(projectedOutputColumns);
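Two switches now cooperate: an input format advertises DECIMAL_64 support through the new getSupportedFeatures() hook (the Parquet implementation above opts out by returning null), and hive.exec.orc.use.decimal64.column.vectors can still veto the feature in validateAndVectorizeMapWork. A minimal sketch of a format opting in; the class name is hypothetical, and in this patch the real opt-in lives in the ORC input format:

import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport;

// Hypothetical format, for illustration only; real formats implement
// VectorizedInputFormatInterface, and getVectorizedInputFormatSupports above
// picks the answer up through FetchOperator's input-format cache.
final class ExampleDecimal64AwareFormat {
  public VectorizedSupport.Support[] getSupportedFeatures() {
    // Advertise that this format can emit Decimal64ColumnVector batches.
    return new VectorizedSupport.Support[] { VectorizedSupport.Support.DECIMAL_64 };
  }
}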
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
index dc58ad18f5..c5e1b332bb 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
@@ -229,7 +229,7 @@ static String getColumnNamesProperty() {
     return "booleanValue,byteValue,shortValue,intValue,longValue,floatValue,doubleValue,stringValue,decimalValue,dateValue,timestampValue";
   }
   static String getColumnTypesProperty() {
-    return "boolean:tinyint:smallint:int:bigint:float:double:string:decimal:date:timestamp";
+    return "boolean:tinyint:smallint:int:bigint:float:double:string:decimal(38,18):date:timestamp";
   }
 }
@@ -3847,9 +3847,10 @@ public void testRowNumberUniquenessInDifferentSplits() throws Exception {
    * Test schema evolution when using the reader directly.
    */
   @Test
-  public void testSchemaEvolution() throws Exception {
+  public void testSchemaEvolutionOldDecimal() throws Exception {
     TypeDescription fileSchema =
         TypeDescription.fromString("struct<a:int,b:struct<c:int>,d:string>");
+    conf.setBoolean(ConfVars.HIVE_EXEC_ORC_USE_DECIMAL64_COLUMN_VECTORS.varname, false);
     Writer writer = OrcFile.createWriter(testFilePath,
         OrcFile.writerOptions(conf)
             .fileSystem(fs)
@@ -3914,6 +3915,78 @@
     rows.close();
   }
 
+  /**
+   * Test schema evolution when using the reader directly.
+   */
+  @Test
+  public void testSchemaEvolutionDecimal64() throws Exception {
+    TypeDescription fileSchema =
+        TypeDescription.fromString("struct<a:int,b:struct<c:int>,d:string>");
+    conf.setBoolean(ConfVars.HIVE_EXEC_ORC_USE_DECIMAL64_COLUMN_VECTORS.varname, true);
+    Writer writer = OrcFile.createWriter(testFilePath,
+        OrcFile.writerOptions(conf)
+            .fileSystem(fs)
+            .setSchema(fileSchema)
+            .compress(org.apache.orc.CompressionKind.NONE));
+    VectorizedRowBatch batch = fileSchema.createRowBatch(TypeDescription.RowBatchVersion.USE_DECIMAL64, 1000);
+    batch.size = 1000;
+    LongColumnVector lcv = ((LongColumnVector) ((StructColumnVector) batch.cols[1]).fields[0]);
+    for(int r=0; r < 1000; r++) {
+      ((LongColumnVector) batch.cols[0]).vector[r] = r * 42;
+      lcv.vector[r] = r * 10001;
+      ((BytesColumnVector) batch.cols[2]).setVal(r,
+          Integer.toHexString(r).getBytes(StandardCharsets.UTF_8));
+    }
+    writer.addRowBatch(batch);
+    writer.close();
+    TypeDescription readerSchema = TypeDescription.fromString(
+        "struct<a:int,b:struct<c:int,future1:int>,d:string,future2:int>");
+    Reader reader = OrcFile.createReader(testFilePath,
+        OrcFile.readerOptions(conf).filesystem(fs));
+    RecordReader rows = reader.rowsOptions(new Reader.Options()
+        .schema(readerSchema));
+    batch = readerSchema.createRowBatchV2();
+    lcv = ((LongColumnVector) ((StructColumnVector) batch.cols[1]).fields[0]);
+    LongColumnVector future1 = ((LongColumnVector) ((StructColumnVector) batch.cols[1]).fields[1]);
+    assertEquals(true, rows.nextBatch(batch));
+    assertEquals(1000, batch.size);
+    assertEquals(true, future1.isRepeating);
+    assertEquals(true, future1.isNull[0]);
+    assertEquals(true, batch.cols[3].isRepeating);
+    assertEquals(true, batch.cols[3].isNull[0]);
+    for(int r=0; r < batch.size; ++r) {
+      assertEquals("row " + r, r * 42, ((LongColumnVector) batch.cols[0]).vector[r]);
+      assertEquals("row " + r, r * 10001, lcv.vector[r]);
+      assertEquals("row " + r, Integer.toHexString(r),
+          ((BytesColumnVector) batch.cols[2]).toString(r));
+    }
+    assertEquals(false, rows.nextBatch(batch));
+    rows.close();
+
+    // try it again with an include vector
+    rows = reader.rowsOptions(new Reader.Options()
+        .schema(readerSchema)
+        .include(new boolean[]{false, true, true, true, false, false, true}));
+    batch = readerSchema.createRowBatchV2();
+    lcv = ((LongColumnVector) ((StructColumnVector) batch.cols[1]).fields[0]);
+    future1 = ((LongColumnVector) ((StructColumnVector) batch.cols[1]).fields[1]);
+    assertEquals(true, rows.nextBatch(batch));
+    assertEquals(1000, batch.size);
+    assertEquals(true, future1.isRepeating);
+    assertEquals(true, future1.isNull[0]);
+    assertEquals(true, batch.cols[3].isRepeating);
+    assertEquals(true, batch.cols[3].isNull[0]);
+    assertEquals(true, batch.cols[2].isRepeating);
+    assertEquals(true, batch.cols[2].isNull[0]);
+    for(int r=0; r < batch.size; ++r) {
+      assertEquals("row " + r, r * 42, ((LongColumnVector) batch.cols[0]).vector[r]);
+      assertEquals("row " + r, r * 10001, lcv.vector[r]);
+    }
+    assertEquals(false, rows.nextBatch(batch));
+    rows.close();
+  }
+
+  /**
+   * Test column projection when using ACID.
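The two tests differ mainly in which batch layout they request. With the upgraded ORC, createRowBatchV2() and createRowBatch(RowBatchVersion.USE_DECIMAL64, n) back small-precision decimal columns with Decimal64ColumnVector, which stores scaled longs, instead of the HiveDecimalWritable-backed DecimalColumnVector. A self-contained sketch of the distinction (assumes the ORC 1.5-era API; the decimal(10,2) column is illustrative):

import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.orc.TypeDescription;

public class RowBatchVersionDemo {
  public static void main(String[] args) {
    TypeDescription schema = TypeDescription.fromString("struct<d:decimal(10,2)>");
    VectorizedRowBatch v1 = schema.createRowBatch(1024); // legacy decimal layout
    VectorizedRowBatch v2 = schema.createRowBatchV2();   // decimal64 layout
    System.out.println(v1.cols[0] instanceof DecimalColumnVector);   // true
    System.out.println(v2.cols[0] instanceof Decimal64ColumnVector); // true
    // Precision 10 <= TypeDescription.MAX_DECIMAL64_PRECISION (18), so the V2
    // batch stores 3.45 as the long 345 together with scale 2.
  }
}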
*/ @@ -3933,7 +4006,7 @@ public void testColumnProjectionWithAcid() throws Exception { .fileSystem(fs) .setSchema(fileSchema) .compress(org.apache.orc.CompressionKind.NONE)); - VectorizedRowBatch batch = fileSchema.createRowBatch(1000); + VectorizedRowBatch batch = fileSchema.createRowBatch(TypeDescription.RowBatchVersion.USE_DECIMAL64,1000); batch.size = 1000; StructColumnVector scv = (StructColumnVector)batch.cols[5]; // operation @@ -4047,7 +4120,7 @@ public void testAcidReadPastLastStripeOffset() throws Exception { .stripeSize(128); // Create ORC file with small stripe size so we can write multiple stripes. Writer writer = OrcFile.createWriter(testFilePath, options); - VectorizedRowBatch batch = fileSchema.createRowBatch(1000); + VectorizedRowBatch batch = fileSchema.createRowBatch(TypeDescription.RowBatchVersion.USE_DECIMAL64,1000); batch.size = 1000; StructColumnVector scv = (StructColumnVector)batch.cols[5]; // operation diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java index d8a7af865d..cc29384e37 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java @@ -168,7 +168,7 @@ private Reader createMockReader() throws IOException { setRow(row4, OrcRecordUpdater.INSERT_OPERATION, 40, 50, 60, 130, "fourth"); OrcStruct row5 = new OrcStruct(OrcRecordUpdater.FIELDS); setRow(row5, OrcRecordUpdater.INSERT_OPERATION, 40, 50, 61, 140, "fifth"); - Mockito.when(reader.rowsOptions(Mockito.any(Reader.Options.class))) + Mockito.when(reader.rowsOptions(Mockito.any(Reader.Options.class), Mockito.any(HiveConf.class))) .thenReturn(recordReader); Mockito.when(recordReader.hasNext()). @@ -192,7 +192,7 @@ public void testReaderPair() throws Exception { RecordIdentifier minKey = new RecordIdentifier(10, 20, 30); RecordIdentifier maxKey = new RecordIdentifier(40, 50, 60); ReaderPair pair = new OrcRawRecordMerger.ReaderPairAcid(key, reader, minKey, maxKey, - new Reader.Options()); + new Reader.Options(), new HiveConf()); RecordReader recordReader = pair.getRecordReader(); assertEquals(10, key.getWriteId()); assertEquals(20, key.getBucketProperty()); @@ -218,7 +218,7 @@ public void testReaderPairNoMin() throws Exception { Reader reader = createMockReader(); ReaderPair pair = new OrcRawRecordMerger.ReaderPairAcid(key, reader, null, null, - new Reader.Options()); + new Reader.Options(), new HiveConf()); RecordReader recordReader = pair.getRecordReader(); assertEquals(10, key.getWriteId()); assertEquals(20, key.getBucketProperty()); @@ -274,7 +274,7 @@ private Reader createMockOriginalReader() throws IOException { OrcStruct row4 = createOriginalRow("fourth"); OrcStruct row5 = createOriginalRow("fifth"); - Mockito.when(reader.rowsOptions(Mockito.any(Reader.Options.class))) + Mockito.when(reader.rowsOptions(Mockito.any(Reader.Options.class), Mockito.any(HiveConf.class))) .thenReturn(recordReader); Mockito.when(recordReader.hasNext()). 
@@ -410,7 +410,7 @@ public void testNewBase() throws Exception {
     types.add(typeBuilder.build());
 
     Mockito.when(reader.getTypes()).thenReturn(types);
-    Mockito.when(reader.rowsOptions(Mockito.any(Reader.Options.class)))
+    Mockito.when(reader.rowsOptions(Mockito.any(Reader.Options.class), Mockito.any(HiveConf.class)))
         .thenReturn(recordReader);
 
     OrcStruct row1 = new OrcStruct(OrcRecordUpdater.FIELDS);
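All of the Mockito updates above track a single signature change: the Hive ORC Reader's rowsOptions now also receives the HiveConf, so the record reader can consult hive.exec.orc.use.decimal64.column.vectors when it materializes batches. The stubbing pattern the tests converge on, extracted as a sketch (assumes Mockito, as already used by these tests):

import static org.mockito.Mockito.any;
import static org.mockito.Mockito.when;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.io.orc.Reader;
import org.apache.hadoop.hive.ql.io.orc.RecordReader;

final class RowsOptionsStub {
  // Both arguments must be matched now; stubbing only the old one-argument
  // rowsOptions(Options) would leave the two-argument call unstubbed.
  static void stubRows(Reader reader, RecordReader recordReader) throws Exception {
    when(reader.rowsOptions(any(Reader.Options.class), any(HiveConf.class)))
        .thenReturn(recordReader);
  }
}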
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java
index 0c9c95d534..c23f00e8a6 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java
@@ -49,6 +49,7 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.orc.TypeDescription;
 import org.junit.Before;
 import org.junit.Test;
@@ -151,7 +152,7 @@ private void checkVectorizedReader() throws Exception {
         OrcFile.readerOptions(conf));
     RecordReaderImpl vrr = (RecordReaderImpl) vreader.rows();
     RecordReaderImpl rr = (RecordReaderImpl) reader.rows();
-    VectorizedRowBatch batch = reader.getSchema().createRowBatch();
+    VectorizedRowBatch batch = reader.getSchema().createRowBatchV2();
     OrcStruct row = null;
 
     // Check Vectorized ORC reader against ORC row reader
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedOrcAcidRowBatchReader.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedOrcAcidRowBatchReader.java
index e478371309..551e5ca0a6 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedOrcAcidRowBatchReader.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedOrcAcidRowBatchReader.java
@@ -226,7 +226,7 @@ private void testVectorizedOrcAcidRowBatchReader(String deleteEventRegistry) thr
       assertTrue(vectorizedReader.getDeleteEventRegistry() instanceof SortMergedDeleteEventRegistry);
     }
     TypeDescription schema = OrcInputFormat.getDesiredRowTypeDescr(conf, true, Integer.MAX_VALUE);
-    VectorizedRowBatch vectorizedRowBatch = schema.createRowBatch();
+    VectorizedRowBatch vectorizedRowBatch = schema.createRowBatchV2();
     vectorizedRowBatch.setPartitionInfo(1, 0); // set data column count as 1.
     long previousPayload = Long.MIN_VALUE;
     while (vectorizedReader.next(null, vectorizedRowBatch)) {
diff --git a/ql/src/test/queries/clientpositive/llap_acid2.q b/ql/src/test/queries/clientpositive/llap_acid2.q
index a409c26aff..cd06d313b9 100644
--- a/ql/src/test/queries/clientpositive/llap_acid2.q
+++ b/ql/src/test/queries/clientpositive/llap_acid2.q
@@ -29,18 +29,27 @@ CREATE TABLE orc_llap_n2 (
     cfloat1 FLOAT,
     cdouble1 DOUBLE,
     cstring1 string,
-    cfloat2 float
-) stored as orc TBLPROPERTIES ('transactional'='true');
+    cfloat2 float,
+    cdecimal1 decimal(10,3),
+    cdecimal2 decimal(38,10)
+) stored as orc TBLPROPERTIES ('transactional'='true','orc.write.format'='UNSTABLE-PRE-2.0');
 
 insert into table orc_llap_n2
 select cint, cbigint, cfloat, cdouble,
  cint as c1, cbigint as c2, cfloat as c3, cdouble as c4,
  cint as c8, cbigint as c7, cfloat as c6, cdouble as c5,
- cstring1, cfloat as c9 from alltypesorc order by cdouble asc limit 30;
-
+ cstring1, cfloat as c9, cast("1.123" as decimal(10,3))as c10,
+ cast("1.123456789" as decimal(38,18)) as c11 from alltypesorc order by cdouble asc limit 30;
+alter table orc_llap_n2 set TBLPROPERTIES ('transactional'='true','orc.write.format'='0.12');
+insert into table orc_llap_n2
+select cint, cbigint, cfloat, cdouble,
+ cint as c1, cbigint as c2, cfloat as c3, cdouble as c4,
+ cint as c8, cbigint as c7, cfloat as c6, cdouble as c5,
+ cstring1, cfloat as c9, cast("3.321" as decimal(10,3))as c10,
+ cast("9.987654321" as decimal(38,18)) as c11 from alltypesorc order by cdouble asc limit 30;
 
 CREATE TABLE orc_llap2 (
@@ -57,18 +66,22 @@ CREATE TABLE orc_llap2 (
     cfloat1 FLOAT,
     cdouble1 DOUBLE,
     cstring1 string,
-    cfloat2 float
-) stored as orc TBLPROPERTIES ('transactional'='false');
+    cfloat2 float,
+    cdecimal1 decimal(10,3),
+    cdecimal2 decimal(38,10)
+) stored as orc TBLPROPERTIES ('transactional'='false', 'orc.write.format'='UNSTABLE-PRE-2.0');
 
 insert into table orc_llap2
 select cint, cbigint, cfloat, cdouble,
  cint as c1, cbigint as c2, cfloat as c3, cdouble as c4,
  cint as c8, cbigint as c7, cfloat as c6, cdouble as c5,
- cstring1, cfloat as c9 from alltypesorc order by cdouble asc limit 30;
+ cstring1, cfloat as c9, cast("1.123" as decimal(10,3))as c10,
+ cast("1.123456789" as decimal(38,18)) as c11 from alltypesorc order by cdouble asc limit 30;
 
-alter table orc_llap2 set TBLPROPERTIES ('transactional'='true');
+alter table orc_llap2 set TBLPROPERTIES ('transactional'='true','orc.write.format'='0.12');
 
-update orc_llap2 set cstring1 = 'testvalue' where cstring1 = 'N016jPED08o';
+update orc_llap2 set cstring1 = 'testvalue', cdecimal1 = cast("3.321" as decimal(10,3)),
+cdecimal2 = cast("9.987654321" as decimal(38,18)) where cstring1 = 'N016jPED08o';
 
 SET hive.llap.io.enabled=true;
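The query files drive the same split from SQL: inserts performed under orc.write.format='UNSTABLE-PRE-2.0' produce files eligible for the decimal64 reader, while '0.12' forces the legacy layout, so a single table ends up holding a mix of both. The property values map onto ORC writer versions; a small sketch of that mapping (assumes OrcFile.Version.byName resolves the same strings the tests put in TBLPROPERTIES):

import org.apache.orc.OrcFile;

final class WriteFormatVersions {
  public static void main(String[] args) {
    // The same strings the tests set via TBLPROPERTIES ('orc.write.format'=...).
    OrcFile.Version legacy = OrcFile.Version.byName("0.12");
    OrcFile.Version fast = OrcFile.Version.byName("UNSTABLE-PRE-2.0");
    System.out.println("legacy=" + legacy + ", decimal64-capable=" + fast);
  }
}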
diff --git a/ql/src/test/queries/clientpositive/llap_decimal64_reader.q b/ql/src/test/queries/clientpositive/llap_decimal64_reader.q
new file mode 100644
index 0000000000..c602ed6e16
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/llap_decimal64_reader.q
@@ -0,0 +1,54 @@
+--! qt:dataset:alltypesorc
+SET hive.vectorized.execution.enabled=true;
+
+SET hive.llap.io.enabled=false;
+
+SET hive.exec.orc.default.row.index.stride=1000;
+SET hive.optimize.index.filter=true;
+set hive.auto.convert.join=false;
+
+DROP TABLE orc_llap_n0;
+
+-- this test mixes and matches orc versions and flips the config to use decimal64 column vectors
+set hive.auto.convert.join=true;
+SET hive.llap.io.enabled=true;
+CREATE TABLE orc_llap_n0(
+    ctinyint TINYINT,
+    csmallint SMALLINT,
+    cint INT,
+    cbigint BIGINT,
+    cfloat FLOAT,
+    cdouble DOUBLE,
+    cstring1 STRING,
+    cstring2 STRING,
+    ctimestamp1 TIMESTAMP,
+    ctimestamp2 TIMESTAMP,
+    cboolean1 BOOLEAN,
+    cboolean2 BOOLEAN,
+    cdecimal1 decimal(10,2),
+    cdecimal2 decimal(38,5))
+    STORED AS ORC tblproperties ("orc.compress"="NONE");
+
+insert into table orc_llap_n0
+select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2,
+  cast("3.345" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc;
+
+alter table orc_llap_n0 set tblproperties ("orc.compress"="NONE", 'orc.write.format'='UNSTABLE-PRE-2.0');
+
+insert into table orc_llap_n0
+select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2,
+  cast("4.456" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc;
+
+set hive.exec.orc.use.decimal64.column.vectors=true;
+explain vectorization select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2))
+  group by cdecimal1,cdecimal2 limit 2;
+select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2))
+  group by cdecimal1,cdecimal2 limit 2;
+
+set hive.exec.orc.use.decimal64.column.vectors=false;
+explain vectorization select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2))
+  group by cdecimal1,cdecimal2 limit 2;
+select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2))
+  group by cdecimal1,cdecimal2 limit 2;
+
+DROP TABLE orc_llap_n0;
diff --git a/ql/src/test/queries/clientpositive/llap_uncompressed.q b/ql/src/test/queries/clientpositive/llap_uncompressed.q
index 875356c73c..de3cdc600e 100644
--- a/ql/src/test/queries/clientpositive/llap_uncompressed.q
+++ b/ql/src/test/queries/clientpositive/llap_uncompressed.q
@@ -24,13 +24,20 @@ CREATE TABLE orc_llap_n0(
     ctimestamp1 TIMESTAMP,
     ctimestamp2 TIMESTAMP,
     cboolean1 BOOLEAN,
-    cboolean2 BOOLEAN)
+    cboolean2 BOOLEAN,
+    cdecimal1 decimal(10,2),
+    cdecimal2 decimal(38,5))
     STORED AS ORC tblproperties ("orc.compress"="NONE");
 
 insert into table orc_llap_n0
-select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2
-from alltypesorc;
+select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2,
+  cast("3.345" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc;
+alter table orc_llap_n0 set tblproperties ("orc.compress"="NONE", 'orc.write.format'='UNSTABLE-PRE-2.0');
+
+insert into table orc_llap_n0
+select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2,
+  cast("3.345" as decimal(10,2)), cast("5.56789" as
decimal(38,5)) from alltypesorc; SET hive.llap.io.enabled=true; diff --git a/ql/src/test/results/clientpositive/llap/llap_acid2.q.out b/ql/src/test/results/clientpositive/llap/llap_acid2.q.out index 4d74a17e08..c3e9c2a214 100644 --- a/ql/src/test/results/clientpositive/llap/llap_acid2.q.out +++ b/ql/src/test/results/clientpositive/llap/llap_acid2.q.out @@ -16,8 +16,10 @@ PREHOOK: query: CREATE TABLE orc_llap_n2 ( cfloat1 FLOAT, cdouble1 DOUBLE, cstring1 string, - cfloat2 float -) stored as orc TBLPROPERTIES ('transactional'='true') + cfloat2 float, + cdecimal1 decimal(10,3), + cdecimal2 decimal(38,10) +) stored as orc TBLPROPERTIES ('transactional'='true','orc.write.format'='UNSTABLE-PRE-2.0') PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@orc_llap_n2 @@ -35,8 +37,10 @@ POSTHOOK: query: CREATE TABLE orc_llap_n2 ( cfloat1 FLOAT, cdouble1 DOUBLE, cstring1 string, - cfloat2 float -) stored as orc TBLPROPERTIES ('transactional'='true') + cfloat2 float, + cdecimal1 decimal(10,3), + cdecimal2 decimal(38,10) +) stored as orc TBLPROPERTIES ('transactional'='true','orc.write.format'='UNSTABLE-PRE-2.0') POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orc_llap_n2 @@ -44,7 +48,8 @@ PREHOOK: query: insert into table orc_llap_n2 select cint, cbigint, cfloat, cdouble, cint as c1, cbigint as c2, cfloat as c3, cdouble as c4, cint as c8, cbigint as c7, cfloat as c6, cdouble as c5, - cstring1, cfloat as c9 from alltypesorc order by cdouble asc limit 30 + cstring1, cfloat as c9, cast("1.123" as decimal(10,3))as c10, + cast("1.123456789" as decimal(38,18)) as c11 from alltypesorc order by cdouble asc limit 30 PREHOOK: type: QUERY PREHOOK: Input: default@alltypesorc PREHOOK: Output: default@orc_llap_n2 @@ -52,13 +57,58 @@ POSTHOOK: query: insert into table orc_llap_n2 select cint, cbigint, cfloat, cdouble, cint as c1, cbigint as c2, cfloat as c3, cdouble as c4, cint as c8, cbigint as c7, cfloat as c6, cdouble as c5, - cstring1, cfloat as c9 from alltypesorc order by cdouble asc limit 30 + cstring1, cfloat as c9, cast("1.123" as decimal(10,3))as c10, + cast("1.123456789" as decimal(38,18)) as c11 from alltypesorc order by cdouble asc limit 30 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc POSTHOOK: Output: default@orc_llap_n2 POSTHOOK: Lineage: orc_llap_n2.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] POSTHOOK: Lineage: orc_llap_n2.cbigint0 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] POSTHOOK: Lineage: orc_llap_n2.cbigint1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cdecimal1 SIMPLE [] +POSTHOOK: Lineage: orc_llap_n2.cdecimal2 EXPRESSION [] +POSTHOOK: Lineage: orc_llap_n2.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cdouble0 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cdouble1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cfloat0 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cfloat1 SIMPLE 
[(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cfloat2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cint0 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cint1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +PREHOOK: query: alter table orc_llap_n2 set TBLPROPERTIES ('transactional'='true','orc.write.format'='0.12') +PREHOOK: type: ALTERTABLE_PROPERTIES +PREHOOK: Input: default@orc_llap_n2 +PREHOOK: Output: default@orc_llap_n2 +POSTHOOK: query: alter table orc_llap_n2 set TBLPROPERTIES ('transactional'='true','orc.write.format'='0.12') +POSTHOOK: type: ALTERTABLE_PROPERTIES +POSTHOOK: Input: default@orc_llap_n2 +POSTHOOK: Output: default@orc_llap_n2 +PREHOOK: query: insert into table orc_llap_n2 +select cint, cbigint, cfloat, cdouble, + cint as c1, cbigint as c2, cfloat as c3, cdouble as c4, + cint as c8, cbigint as c7, cfloat as c6, cdouble as c5, + cstring1, cfloat as c9, cast("3.321" as decimal(10,3))as c10, + cast("9.987654321" as decimal(38,18)) as c11 from alltypesorc order by cdouble asc limit 30 +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: default@orc_llap_n2 +POSTHOOK: query: insert into table orc_llap_n2 +select cint, cbigint, cfloat, cdouble, + cint as c1, cbigint as c2, cfloat as c3, cdouble as c4, + cint as c8, cbigint as c7, cfloat as c6, cdouble as c5, + cstring1, cfloat as c9, cast("3.321" as decimal(10,3))as c10, + cast("9.987654321" as decimal(38,18)) as c11 from alltypesorc order by cdouble asc limit 30 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: default@orc_llap_n2 +POSTHOOK: Lineage: orc_llap_n2.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cbigint0 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cbigint1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n2.cdecimal1 SIMPLE [] +POSTHOOK: Lineage: orc_llap_n2.cdecimal2 EXPRESSION [] POSTHOOK: Lineage: orc_llap_n2.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] POSTHOOK: Lineage: orc_llap_n2.cdouble0 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] POSTHOOK: Lineage: orc_llap_n2.cdouble1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] @@ -84,8 +134,10 @@ PREHOOK: query: CREATE TABLE orc_llap2 ( cfloat1 FLOAT, cdouble1 DOUBLE, cstring1 string, - cfloat2 float -) stored as orc TBLPROPERTIES ('transactional'='false') + cfloat2 float, + cdecimal1 decimal(10,3), + cdecimal2 decimal(38,10) +) stored as orc TBLPROPERTIES ('transactional'='false', 'orc.write.format'='UNSTABLE-PRE-2.0') PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@orc_llap2 @@ -103,8 +155,10 @@ POSTHOOK: query: CREATE TABLE orc_llap2 ( cfloat1 FLOAT, cdouble1 DOUBLE, cstring1 string, - cfloat2 float -) stored as orc 
TBLPROPERTIES ('transactional'='false') + cfloat2 float, + cdecimal1 decimal(10,3), + cdecimal2 decimal(38,10) +) stored as orc TBLPROPERTIES ('transactional'='false', 'orc.write.format'='UNSTABLE-PRE-2.0') POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@orc_llap2 @@ -112,7 +166,8 @@ PREHOOK: query: insert into table orc_llap2 select cint, cbigint, cfloat, cdouble, cint as c1, cbigint as c2, cfloat as c3, cdouble as c4, cint as c8, cbigint as c7, cfloat as c6, cdouble as c5, - cstring1, cfloat as c9 from alltypesorc order by cdouble asc limit 30 + cstring1, cfloat as c9, cast("1.123" as decimal(10,3))as c10, + cast("1.123456789" as decimal(38,18)) as c11 from alltypesorc order by cdouble asc limit 30 PREHOOK: type: QUERY PREHOOK: Input: default@alltypesorc PREHOOK: Output: default@orc_llap2 @@ -120,13 +175,16 @@ POSTHOOK: query: insert into table orc_llap2 select cint, cbigint, cfloat, cdouble, cint as c1, cbigint as c2, cfloat as c3, cdouble as c4, cint as c8, cbigint as c7, cfloat as c6, cdouble as c5, - cstring1, cfloat as c9 from alltypesorc order by cdouble asc limit 30 + cstring1, cfloat as c9, cast("1.123" as decimal(10,3))as c10, + cast("1.123456789" as decimal(38,18)) as c11 from alltypesorc order by cdouble asc limit 30 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc POSTHOOK: Output: default@orc_llap2 POSTHOOK: Lineage: orc_llap2.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] POSTHOOK: Lineage: orc_llap2.cbigint0 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] POSTHOOK: Lineage: orc_llap2.cbigint1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: orc_llap2.cdecimal1 SIMPLE [] +POSTHOOK: Lineage: orc_llap2.cdecimal2 EXPRESSION [] POSTHOOK: Lineage: orc_llap2.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] POSTHOOK: Lineage: orc_llap2.cdouble0 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] POSTHOOK: Lineage: orc_llap2.cdouble1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] @@ -138,19 +196,21 @@ POSTHOOK: Lineage: orc_llap2.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(n POSTHOOK: Lineage: orc_llap2.cint0 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] POSTHOOK: Lineage: orc_llap2.cint1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] POSTHOOK: Lineage: orc_llap2.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] -PREHOOK: query: alter table orc_llap2 set TBLPROPERTIES ('transactional'='true') +PREHOOK: query: alter table orc_llap2 set TBLPROPERTIES ('transactional'='true','orc.write.format'='0.12') PREHOOK: type: ALTERTABLE_PROPERTIES PREHOOK: Input: default@orc_llap2 PREHOOK: Output: default@orc_llap2 -POSTHOOK: query: alter table orc_llap2 set TBLPROPERTIES ('transactional'='true') +POSTHOOK: query: alter table orc_llap2 set TBLPROPERTIES ('transactional'='true','orc.write.format'='0.12') POSTHOOK: type: ALTERTABLE_PROPERTIES POSTHOOK: Input: default@orc_llap2 POSTHOOK: Output: default@orc_llap2 -PREHOOK: query: update orc_llap2 set cstring1 = 'testvalue' where cstring1 = 'N016jPED08o' +PREHOOK: query: update orc_llap2 set cstring1 = 'testvalue', cdecimal1 = cast("3.321" as decimal(10,3)), +cdecimal2 = cast("9.987654321" as 
decimal(38,18)) where cstring1 = 'N016jPED08o' PREHOOK: type: QUERY PREHOOK: Input: default@orc_llap2 PREHOOK: Output: default@orc_llap2 -POSTHOOK: query: update orc_llap2 set cstring1 = 'testvalue' where cstring1 = 'N016jPED08o' +POSTHOOK: query: update orc_llap2 set cstring1 = 'testvalue', cdecimal1 = cast("3.321" as decimal(10,3)), +cdecimal2 = cast("9.987654321" as decimal(38,18)) where cstring1 = 'N016jPED08o' POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_llap2 POSTHOOK: Output: default@orc_llap2 @@ -192,6 +252,36 @@ xTlDv24JYv4s 7wH3hBKdO55Xq3gEEe0 5QLs0LVK1g ET3d4F2I4lV +N016jPED08o +Q1JAdUlCVORmR0Q5X5Vf5u6 +eNsh5tYa +5j7GJ8OCXgMVIcK7 +uJGHsW3cd073NGFITyQ +G1u0pUmU6ehCm +mk6lShdOa8kXT8i7mLd3fK +u5C7glqT5XqtO0JE2686lk1 +h4omSc1jcLLwW +tFY2ng51v +vmAT10eeE47fgH20pLi +uN803aW +qqbDw46IgGds4 +32v414p63Jv1B4tO1xy +73xdw4X +d3o1712a03n20qvi62U7 +eQ80MW0h728I204P87YXc +KHtD2A2hp6OjFgS73gdgE +nI30tm7U55O0gI +LSJtFA66 +mby00c +meGb5 +pM6Gt05s1YJeii +LR2AKy0dPt8vFdIV5760jriw +1B3WMD5LSk65B2Moa +xTlDv24JYv4s +28Oe6r21yux7Lk47 +7wH3hBKdO55Xq3gEEe0 +5QLs0LVK1g +ET3d4F2I4lV PREHOOK: query: select cfloat2, cint from orc_llap_n2 PREHOOK: type: QUERY PREHOOK: Input: default@orc_llap_n2 @@ -230,6 +320,36 @@ NULL -899422227 11.0 385623629 11.0 681126962 11.0 25892751 +NULL -838810013 +NULL 246423894 +NULL 708885482 +NULL 186967185 +NULL -595277064 +NULL 584923170 +NULL 518213127 +NULL -334595454 +NULL 241008004 +NULL 185212032 +NULL -738747840 +NULL -971543377 +NULL 940448896 +NULL -324030556 +NULL -899422227 +11.0 835111400 +11.0 -775326158 +11.0 653630202 +11.0 779427499 +11.0 797003983 +11.0 31832752 +11.0 783790031 +11.0 -898241885 +11.0 NULL +11.0 -646295381 +11.0 130912195 +11.0 -391573084 +11.0 385623629 +11.0 681126962 +11.0 25892751 PREHOOK: query: select * from orc_llap_n2 PREHOOK: type: QUERY PREHOOK: Input: default@orc_llap_n2 @@ -238,36 +358,66 @@ POSTHOOK: query: select * from orc_llap_n2 POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_llap_n2 #### A masked pattern was here #### --838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL N016jPED08o NULL -246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL Q1JAdUlCVORmR0Q5X5Vf5u6 NULL -708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL eNsh5tYa NULL -186967185 -1645852809 NULL NULL 186967185 -1645852809 NULL NULL 186967185 -1645852809 NULL NULL 5j7GJ8OCXgMVIcK7 NULL --595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL uJGHsW3cd073NGFITyQ NULL -584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL G1u0pUmU6ehCm NULL -518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL mk6lShdOa8kXT8i7mLd3fK NULL --334595454 -1645852809 NULL NULL -334595454 -1645852809 NULL NULL -334595454 -1645852809 NULL NULL u5C7glqT5XqtO0JE2686lk1 NULL -241008004 -1645852809 NULL NULL 241008004 -1645852809 NULL NULL 241008004 -1645852809 NULL NULL h4omSc1jcLLwW NULL -185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL tFY2ng51v NULL --738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL vmAT10eeE47fgH20pLi NULL --971543377 -1645852809 NULL NULL -971543377 -1645852809 NULL NULL -971543377 -1645852809 NULL NULL uN803aW NULL -940448896 -1645852809 NULL NULL 940448896 -1645852809 NULL NULL 940448896 -1645852809 NULL NULL 
qqbDw46IgGds4 NULL --324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL 32v414p63Jv1B4tO1xy NULL --899422227 -1645852809 NULL NULL -899422227 -1645852809 NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X NULL -835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL d3o1712a03n20qvi62U7 11.0 --775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL eQ80MW0h728I204P87YXc 11.0 -653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL KHtD2A2hp6OjFgS73gdgE 11.0 -779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL nI30tm7U55O0gI 11.0 -797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL LSJtFA66 11.0 -31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL mby00c 11.0 -783790031 -1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL meGb5 11.0 --898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL pM6Gt05s1YJeii 11.0 -NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL LR2AKy0dPt8vFdIV5760jriw 11.0 --646295381 -1654635859 11.0 NULL -646295381 -1654635859 11.0 NULL -646295381 -1654635859 11.0 NULL 1B3WMD5LSk65B2Moa 11.0 -130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL xTlDv24JYv4s 11.0 --391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL 28Oe6r21yux7Lk47 11.0 -385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 7wH3hBKdO55Xq3gEEe0 11.0 -681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 5QLs0LVK1g 11.0 -25892751 -1978674520 11.0 NULL 25892751 -1978674520 11.0 NULL 25892751 -1978674520 11.0 NULL ET3d4F2I4lV 11.0 +-838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL N016jPED08o NULL 3.321 9.9876543210 +246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL Q1JAdUlCVORmR0Q5X5Vf5u6 NULL 3.321 9.9876543210 +708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL eNsh5tYa NULL 3.321 9.9876543210 +186967185 -1645852809 NULL NULL 186967185 -1645852809 NULL NULL 186967185 -1645852809 NULL NULL 5j7GJ8OCXgMVIcK7 NULL 3.321 9.9876543210 +-595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL uJGHsW3cd073NGFITyQ NULL 3.321 9.9876543210 +584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL G1u0pUmU6ehCm NULL 3.321 9.9876543210 +518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL mk6lShdOa8kXT8i7mLd3fK NULL 3.321 9.9876543210 +-334595454 -1645852809 NULL NULL -334595454 -1645852809 NULL NULL -334595454 -1645852809 NULL NULL u5C7glqT5XqtO0JE2686lk1 NULL 3.321 9.9876543210 +241008004 -1645852809 NULL NULL 241008004 -1645852809 NULL NULL 241008004 -1645852809 NULL NULL h4omSc1jcLLwW NULL 3.321 9.9876543210 +185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL tFY2ng51v NULL 3.321 9.9876543210 +-738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL vmAT10eeE47fgH20pLi NULL 3.321 9.9876543210 +-971543377 -1645852809 NULL NULL -971543377 
-1645852809 NULL NULL -971543377 -1645852809 NULL NULL uN803aW NULL 3.321 9.9876543210 +940448896 -1645852809 NULL NULL 940448896 -1645852809 NULL NULL 940448896 -1645852809 NULL NULL qqbDw46IgGds4 NULL 3.321 9.9876543210 +-324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL 32v414p63Jv1B4tO1xy NULL 3.321 9.9876543210 +-899422227 -1645852809 NULL NULL -899422227 -1645852809 NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X NULL 3.321 9.9876543210 +835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL d3o1712a03n20qvi62U7 11.0 3.321 9.9876543210 +-775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL eQ80MW0h728I204P87YXc 11.0 3.321 9.9876543210 +653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL KHtD2A2hp6OjFgS73gdgE 11.0 3.321 9.9876543210 +779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL nI30tm7U55O0gI 11.0 3.321 9.9876543210 +797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL LSJtFA66 11.0 3.321 9.9876543210 +31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL mby00c 11.0 3.321 9.9876543210 +783790031 -1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL meGb5 11.0 3.321 9.9876543210 +-898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL pM6Gt05s1YJeii 11.0 3.321 9.9876543210 +NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL LR2AKy0dPt8vFdIV5760jriw 11.0 3.321 9.9876543210 +-646295381 -1654635859 11.0 NULL -646295381 -1654635859 11.0 NULL -646295381 -1654635859 11.0 NULL 1B3WMD5LSk65B2Moa 11.0 3.321 9.9876543210 +130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL xTlDv24JYv4s 11.0 3.321 9.9876543210 +-391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL 28Oe6r21yux7Lk47 11.0 3.321 9.9876543210 +385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 7wH3hBKdO55Xq3gEEe0 11.0 3.321 9.9876543210 +681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 5QLs0LVK1g 11.0 3.321 9.9876543210 +25892751 -1978674520 11.0 NULL 25892751 -1978674520 11.0 NULL 25892751 -1978674520 11.0 NULL ET3d4F2I4lV 11.0 3.321 9.9876543210 +-838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL N016jPED08o NULL 1.123 1.1234567890 +246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL Q1JAdUlCVORmR0Q5X5Vf5u6 NULL 1.123 1.1234567890 +708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL eNsh5tYa NULL 1.123 1.1234567890 +186967185 -1645852809 NULL NULL 186967185 -1645852809 NULL NULL 186967185 -1645852809 NULL NULL 5j7GJ8OCXgMVIcK7 NULL 1.123 1.1234567890 +-595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL uJGHsW3cd073NGFITyQ NULL 1.123 1.1234567890 +584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL G1u0pUmU6ehCm NULL 1.123 1.1234567890 +518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL mk6lShdOa8kXT8i7mLd3fK NULL 1.123 1.1234567890 +-334595454 -1645852809 NULL NULL -334595454 -1645852809 NULL NULL -334595454 
-1645852809 NULL NULL u5C7glqT5XqtO0JE2686lk1 NULL 1.123 1.1234567890 +241008004 -1645852809 NULL NULL 241008004 -1645852809 NULL NULL 241008004 -1645852809 NULL NULL h4omSc1jcLLwW NULL 1.123 1.1234567890 +185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL tFY2ng51v NULL 1.123 1.1234567890 +-738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL vmAT10eeE47fgH20pLi NULL 1.123 1.1234567890 +-971543377 -1645852809 NULL NULL -971543377 -1645852809 NULL NULL -971543377 -1645852809 NULL NULL uN803aW NULL 1.123 1.1234567890 +940448896 -1645852809 NULL NULL 940448896 -1645852809 NULL NULL 940448896 -1645852809 NULL NULL qqbDw46IgGds4 NULL 1.123 1.1234567890 +-324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL 32v414p63Jv1B4tO1xy NULL 1.123 1.1234567890 +-899422227 -1645852809 NULL NULL -899422227 -1645852809 NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X NULL 1.123 1.1234567890 +835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL d3o1712a03n20qvi62U7 11.0 1.123 1.1234567890 +-775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL eQ80MW0h728I204P87YXc 11.0 1.123 1.1234567890 +653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL KHtD2A2hp6OjFgS73gdgE 11.0 1.123 1.1234567890 +779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL nI30tm7U55O0gI 11.0 1.123 1.1234567890 +797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL LSJtFA66 11.0 1.123 1.1234567890 +31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL mby00c 11.0 1.123 1.1234567890 +783790031 -1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL meGb5 11.0 1.123 1.1234567890 +-898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL pM6Gt05s1YJeii 11.0 1.123 1.1234567890 +NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL LR2AKy0dPt8vFdIV5760jriw 11.0 1.123 1.1234567890 +-646295381 -1654635859 11.0 NULL -646295381 -1654635859 11.0 NULL -646295381 -1654635859 11.0 NULL 1B3WMD5LSk65B2Moa 11.0 1.123 1.1234567890 +130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL xTlDv24JYv4s 11.0 1.123 1.1234567890 +-391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL 28Oe6r21yux7Lk47 11.0 1.123 1.1234567890 +385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 7wH3hBKdO55Xq3gEEe0 11.0 1.123 1.1234567890 +681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 5QLs0LVK1g 11.0 1.123 1.1234567890 +25892751 -1978674520 11.0 NULL 25892751 -1978674520 11.0 NULL 25892751 -1978674520 11.0 NULL ET3d4F2I4lV 11.0 1.123 1.1234567890 PREHOOK: query: select cstring1 from orc_llap2 PREHOOK: type: QUERY PREHOOK: Input: default@orc_llap2 @@ -352,36 +502,36 @@ POSTHOOK: query: select * from orc_llap2 POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_llap2 #### A masked pattern was here #### -246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL Q1JAdUlCVORmR0Q5X5Vf5u6 NULL -708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL eNsh5tYa NULL -186967185 -1645852809 NULL 
NULL 186967185 -1645852809 NULL NULL 186967185 -1645852809 NULL NULL 5j7GJ8OCXgMVIcK7 NULL --595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL uJGHsW3cd073NGFITyQ NULL -584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL G1u0pUmU6ehCm NULL -518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL mk6lShdOa8kXT8i7mLd3fK NULL --334595454 -1645852809 NULL NULL -334595454 -1645852809 NULL NULL -334595454 -1645852809 NULL NULL u5C7glqT5XqtO0JE2686lk1 NULL -241008004 -1645852809 NULL NULL 241008004 -1645852809 NULL NULL 241008004 -1645852809 NULL NULL h4omSc1jcLLwW NULL -185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL tFY2ng51v NULL --738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL vmAT10eeE47fgH20pLi NULL --971543377 -1645852809 NULL NULL -971543377 -1645852809 NULL NULL -971543377 -1645852809 NULL NULL uN803aW NULL -940448896 -1645852809 NULL NULL 940448896 -1645852809 NULL NULL 940448896 -1645852809 NULL NULL qqbDw46IgGds4 NULL --324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL 32v414p63Jv1B4tO1xy NULL --899422227 -1645852809 NULL NULL -899422227 -1645852809 NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X NULL -835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL d3o1712a03n20qvi62U7 11.0 --775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL eQ80MW0h728I204P87YXc 11.0 -653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL KHtD2A2hp6OjFgS73gdgE 11.0 -779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL nI30tm7U55O0gI 11.0 -797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL LSJtFA66 11.0 -31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL mby00c 11.0 -783790031 -1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL meGb5 11.0 --898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL pM6Gt05s1YJeii 11.0 -NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL LR2AKy0dPt8vFdIV5760jriw 11.0 --646295381 -1654635859 11.0 NULL -646295381 -1654635859 11.0 NULL -646295381 -1654635859 11.0 NULL 1B3WMD5LSk65B2Moa 11.0 -130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL xTlDv24JYv4s 11.0 --391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL 28Oe6r21yux7Lk47 11.0 -385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 7wH3hBKdO55Xq3gEEe0 11.0 -681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 5QLs0LVK1g 11.0 -25892751 -1978674520 11.0 NULL 25892751 -1978674520 11.0 NULL 25892751 -1978674520 11.0 NULL ET3d4F2I4lV 11.0 --838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL testvalue NULL +246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL 246423894 -1645852809 NULL NULL Q1JAdUlCVORmR0Q5X5Vf5u6 NULL 1.123 1.1234567890 +708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL 708885482 -1645852809 NULL NULL eNsh5tYa NULL 1.123 1.1234567890 +186967185 -1645852809 NULL NULL 
186967185 -1645852809 NULL NULL 186967185 -1645852809 NULL NULL 5j7GJ8OCXgMVIcK7 NULL 1.123 1.1234567890 +-595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL -595277064 -1645852809 NULL NULL uJGHsW3cd073NGFITyQ NULL 1.123 1.1234567890 +584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL 584923170 -1645852809 NULL NULL G1u0pUmU6ehCm NULL 1.123 1.1234567890 +518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL 518213127 -1645852809 NULL NULL mk6lShdOa8kXT8i7mLd3fK NULL 1.123 1.1234567890 +-334595454 -1645852809 NULL NULL -334595454 -1645852809 NULL NULL -334595454 -1645852809 NULL NULL u5C7glqT5XqtO0JE2686lk1 NULL 1.123 1.1234567890 +241008004 -1645852809 NULL NULL 241008004 -1645852809 NULL NULL 241008004 -1645852809 NULL NULL h4omSc1jcLLwW NULL 1.123 1.1234567890 +185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL 185212032 -1645852809 NULL NULL tFY2ng51v NULL 1.123 1.1234567890 +-738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL -738747840 -1645852809 NULL NULL vmAT10eeE47fgH20pLi NULL 1.123 1.1234567890 +-971543377 -1645852809 NULL NULL -971543377 -1645852809 NULL NULL -971543377 -1645852809 NULL NULL uN803aW NULL 1.123 1.1234567890 +940448896 -1645852809 NULL NULL 940448896 -1645852809 NULL NULL 940448896 -1645852809 NULL NULL qqbDw46IgGds4 NULL 1.123 1.1234567890 +-324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL -324030556 -1645852809 NULL NULL 32v414p63Jv1B4tO1xy NULL 1.123 1.1234567890 +-899422227 -1645852809 NULL NULL -899422227 -1645852809 NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X NULL 1.123 1.1234567890 +835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL 835111400 1964238982 11.0 NULL d3o1712a03n20qvi62U7 11.0 1.123 1.1234567890 +-775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL -775326158 -1289793978 11.0 NULL eQ80MW0h728I204P87YXc 11.0 1.123 1.1234567890 +653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL 653630202 1281184487 11.0 NULL KHtD2A2hp6OjFgS73gdgE 11.0 1.123 1.1234567890 +779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL 779427499 1326393090 11.0 NULL nI30tm7U55O0gI 11.0 1.123 1.1234567890 +797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL 797003983 1186689849 11.0 NULL LSJtFA66 11.0 1.123 1.1234567890 +31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL 31832752 1854212271 11.0 NULL mby00c 11.0 1.123 1.1234567890 +783790031 -1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL 783790031 -1482854823 11.0 NULL meGb5 11.0 1.123 1.1234567890 +-898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL -898241885 -1785664982 11.0 NULL pM6Gt05s1YJeii 11.0 1.123 1.1234567890 +NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL NULL -1083386085 11.0 NULL LR2AKy0dPt8vFdIV5760jriw 11.0 1.123 1.1234567890 +-646295381 -1654635859 11.0 NULL -646295381 -1654635859 11.0 NULL -646295381 -1654635859 11.0 NULL 1B3WMD5LSk65B2Moa 11.0 1.123 1.1234567890 +130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL 130912195 -1286145901 11.0 NULL xTlDv24JYv4s 11.0 1.123 1.1234567890 +-391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL -391573084 -236100834 11.0 NULL 28Oe6r21yux7Lk47 11.0 1.123 1.1234567890 +385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 385623629 236101682 11.0 NULL 7wH3hBKdO55Xq3gEEe0 11.0 1.123 1.1234567890 +681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 681126962 993392163 11.0 NULL 5QLs0LVK1g 11.0 1.123 1.1234567890 +25892751 -1978674520 11.0 NULL 25892751 
-1978674520 11.0 NULL 25892751 -1978674520 11.0 NULL ET3d4F2I4lV 11.0 1.123 1.1234567890 +-838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL -838810013 1864027286 NULL NULL testvalue NULL 3.321 9.9876543210 PREHOOK: query: DROP TABLE orc_llap_n2 PREHOOK: type: DROPTABLE PREHOOK: Input: default@orc_llap_n2 diff --git a/ql/src/test/results/clientpositive/llap/llap_decimal64_reader.q.out b/ql/src/test/results/clientpositive/llap/llap_decimal64_reader.q.out new file mode 100644 index 0000000000..9569e9c284 --- /dev/null +++ b/ql/src/test/results/clientpositive/llap/llap_decimal64_reader.q.out @@ -0,0 +1,303 @@ +PREHOOK: query: DROP TABLE orc_llap_n0 +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE orc_llap_n0 +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE orc_llap_n0( + ctinyint TINYINT, + csmallint SMALLINT, + cint INT, + cbigint BIGINT, + cfloat FLOAT, + cdouble DOUBLE, + cstring1 STRING, + cstring2 STRING, + ctimestamp1 TIMESTAMP, + ctimestamp2 TIMESTAMP, + cboolean1 BOOLEAN, + cboolean2 BOOLEAN, + cdecimal1 decimal(10,2), + cdecimal2 decimal(38,5)) + STORED AS ORC tblproperties ("orc.compress"="NONE") +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@orc_llap_n0 +POSTHOOK: query: CREATE TABLE orc_llap_n0( + ctinyint TINYINT, + csmallint SMALLINT, + cint INT, + cbigint BIGINT, + cfloat FLOAT, + cdouble DOUBLE, + cstring1 STRING, + cstring2 STRING, + ctimestamp1 TIMESTAMP, + ctimestamp2 TIMESTAMP, + cboolean1 BOOLEAN, + cboolean2 BOOLEAN, + cdecimal1 decimal(10,2), + cdecimal2 decimal(38,5)) + STORED AS ORC tblproperties ("orc.compress"="NONE") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@orc_llap_n0 +PREHOOK: query: insert into table orc_llap_n0 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2, + cast("3.345" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: default@orc_llap_n0 +POSTHOOK: query: insert into table orc_llap_n0 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2, + cast("3.345" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: default@orc_llap_n0 +POSTHOOK: Lineage: orc_llap_n0.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cboolean1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cboolean2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cdecimal1 SIMPLE [] +POSTHOOK: Lineage: orc_llap_n0.cdecimal2 SIMPLE [] +POSTHOOK: Lineage: orc_llap_n0.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.csmallint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cstring1 SIMPLE 
[(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctimestamp1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctimestamp2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ] +PREHOOK: query: alter table orc_llap_n0 set tblproperties ("orc.compress"="NONE", 'orc.write.format'='UNSTABLE-PRE-2.0') +PREHOOK: type: ALTERTABLE_PROPERTIES +PREHOOK: Input: default@orc_llap_n0 +PREHOOK: Output: default@orc_llap_n0 +POSTHOOK: query: alter table orc_llap_n0 set tblproperties ("orc.compress"="NONE", 'orc.write.format'='UNSTABLE-PRE-2.0') +POSTHOOK: type: ALTERTABLE_PROPERTIES +POSTHOOK: Input: default@orc_llap_n0 +POSTHOOK: Output: default@orc_llap_n0 +PREHOOK: query: insert into table orc_llap_n0 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2, + cast("4.456" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: default@orc_llap_n0 +POSTHOOK: query: insert into table orc_llap_n0 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2, + cast("4.456" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: default@orc_llap_n0 +POSTHOOK: Lineage: orc_llap_n0.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cboolean1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cboolean2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cdecimal1 SIMPLE [] +POSTHOOK: Lineage: orc_llap_n0.cdecimal2 SIMPLE [] +POSTHOOK: Lineage: orc_llap_n0.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.csmallint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctimestamp1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctimestamp2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ] +PREHOOK: query: explain vectorization select 
cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: orc_llap_n0 + filterExpr: ((cdecimal1 = 3.35) or (cdecimal1 = 4.46)) (type: boolean) + Statistics: Num rows: 24576 Data size: 5505024 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: ((cdecimal1 = 3.35) or (cdecimal1 = 4.46)) (type: boolean) + Statistics: Num rows: 24576 Data size: 5505024 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + keys: cdecimal1 (type: decimal(10,2)), cdecimal2 (type: decimal(38,5)) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: decimal(10,2)), _col1 (type: decimal(38,5)) + sort order: ++ + Map-reduce partition columns: _col0 (type: decimal(10,2)), _col1 (type: decimal(38,5)) + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + TopN Hash Memory Usage: 0.1 + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + keys: KEY._col0 (type: decimal(10,2)), KEY._col1 (type: decimal(38,5)) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + Limit + Number of rows: 2 + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 2 + Processor Tree: + ListSink + +PREHOOK: query: select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2 +PREHOOK: type: QUERY +PREHOOK: Input: default@orc_llap_n0 +#### A masked pattern was here #### +POSTHOOK: query: select cdecimal1,cdecimal2 from 
orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@orc_llap_n0 +#### A masked pattern was here #### +4.46 5.56789 +3.35 5.56789 +PREHOOK: query: explain vectorization select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: orc_llap_n0 + filterExpr: ((cdecimal1 = 3.35) or (cdecimal1 = 4.46)) (type: boolean) + Statistics: Num rows: 24576 Data size: 5505024 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: ((cdecimal1 = 3.35) or (cdecimal1 = 4.46)) (type: boolean) + Statistics: Num rows: 24576 Data size: 5505024 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + keys: cdecimal1 (type: decimal(10,2)), cdecimal2 (type: decimal(38,5)) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: decimal(10,2)), _col1 (type: decimal(38,5)) + sort order: ++ + Map-reduce partition columns: _col0 (type: decimal(10,2)), _col1 (type: decimal(38,5)) + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + TopN Hash Memory Usage: 0.1 + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + vectorizationSupportRemovedReasons: [DECIMAL_64 disabled as hive.exec.orc.use.decimal64.column.vectors is set to false] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + keys: KEY._col0 (type: decimal(10,2)), KEY._col1 (type: decimal(38,5)) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + Limit + Number of rows: 2 + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 2 + Processor Tree: + ListSink + +PREHOOK: query: select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2 +PREHOOK: type: QUERY +PREHOOK: Input: default@orc_llap_n0 +#### A masked pattern was here #### +POSTHOOK: query: select cdecimal1,cdecimal2 from orc_llap_n0 where cdecimal1 = cast("3.345" as decimal(10,2)) or cdecimal1 = cast("4.456" as decimal(10,2)) + group by cdecimal1,cdecimal2 limit 2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@orc_llap_n0 +#### A masked pattern was here #### +4.46 5.56789 +3.35 5.56789 +PREHOOK: query: DROP TABLE orc_llap_n0 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@orc_llap_n0 +PREHOOK: Output: default@orc_llap_n0 +POSTHOOK: query: DROP TABLE orc_llap_n0 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@orc_llap_n0 +POSTHOOK: Output: default@orc_llap_n0 diff --git a/ql/src/test/results/clientpositive/llap/llap_partitioned.q.out b/ql/src/test/results/clientpositive/llap/llap_partitioned.q.out index faab23c3a3..e6fa1ac2fb 100644 --- a/ql/src/test/results/clientpositive/llap/llap_partitioned.q.out +++ b/ql/src/test/results/clientpositive/llap/llap_partitioned.q.out @@ -1679,8 +1679,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1745,8 +1745,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2127,8 +2127,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: true diff --git a/ql/src/test/results/clientpositive/llap/llap_text.q.out b/ql/src/test/results/clientpositive/llap/llap_text.q.out new file mode 100644 index 0000000000..40d08d381f --- /dev/null +++ b/ql/src/test/results/clientpositive/llap/llap_text.q.out @@ -0,0 +1,1082 @@ +PREHOOK: query: DROP TABLE text_llap +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE text_llap +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE text_llap( + ctinyint TINYINT, + csmallint SMALLINT, + cint INT, + cbigint BIGINT, + cfloat FLOAT, + cdouble DOUBLE, + cstring1 STRING, + cstring2 STRING, + ctimestamp1 TIMESTAMP, + ctimestamp2 TIMESTAMP, + cboolean1 BOOLEAN, + cboolean2 BOOLEAN) +row format serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' +stored as inputformat "org.apache.hadoop.mapred.TextInputFormat" + + outputformat "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat" +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@text_llap +POSTHOOK: query: CREATE 
TABLE text_llap( + ctinyint TINYINT, + csmallint SMALLINT, + cint INT, + cbigint BIGINT, + cfloat FLOAT, + cdouble DOUBLE, + cstring1 STRING, + cstring2 STRING, + ctimestamp1 TIMESTAMP, + ctimestamp2 TIMESTAMP, + cboolean1 BOOLEAN, + cboolean2 BOOLEAN) +row format serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' +stored as inputformat "org.apache.hadoop.mapred.TextInputFormat" + + outputformat "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat" +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@text_llap +PREHOOK: query: insert into table text_llap +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2 from alltypesorc +where cboolean2 is not null or cstring1 is not null or ctinyint is not null +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: default@text_llap +POSTHOOK: query: insert into table text_llap +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2 from alltypesorc +where cboolean2 is not null or cstring1 is not null or ctinyint is not null +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: default@text_llap +POSTHOOK: Lineage: text_llap.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: text_llap.cboolean1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: text_llap.cboolean2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ] +POSTHOOK: Lineage: text_llap.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: text_llap.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: text_llap.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: text_llap.csmallint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: text_llap.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: text_llap.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ] +POSTHOOK: Lineage: text_llap.ctimestamp1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: text_llap.ctimestamp2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ] +POSTHOOK: Lineage: text_llap.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ] +PREHOOK: query: create table text_llap2( + t tinyint, + si smallint, + i int, + b bigint, + f float, + d double, + bo boolean, + s string, + ts timestamp, + `dec` decimal, + bin binary) +row format delimited fields terminated by '|' +stored as inputformat "org.apache.hadoop.mapred.TextInputFormat" + +outputformat "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat" +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@text_llap2 +POSTHOOK: query: create table text_llap2( + t tinyint, + si smallint, + i int, + b bigint, + f float, + d double, + bo boolean, + s string, + ts timestamp, + `dec` decimal, + bin binary) +row format delimited fields 
terminated by '|' +stored as inputformat "org.apache.hadoop.mapred.TextInputFormat" + +outputformat "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat" +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@text_llap2 +PREHOOK: query: load data local inpath '../../data/files/over10k.gz' into table text_llap2 +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@text_llap2 +POSTHOOK: query: load data local inpath '../../data/files/over10k.gz' into table text_llap2 +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@text_llap2 +PREHOOK: query: create table text_llap1 like text_llap +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@text_llap1 +POSTHOOK: query: create table text_llap1 like text_llap +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@text_llap1 +PREHOOK: query: create table text_llap100 like text_llap +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@text_llap100 +POSTHOOK: query: create table text_llap100 like text_llap +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@text_llap100 +PREHOOK: query: create table text_llap1000 like text_llap +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@text_llap1000 +POSTHOOK: query: create table text_llap1000 like text_llap +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@text_llap1000 +PREHOOK: query: insert into table text_llap1 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2 from alltypesorc +where cboolean2 is not null or cstring1 is not null or ctinyint is not null limit 1 +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: default@text_llap1 +POSTHOOK: query: insert into table text_llap1 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2 from alltypesorc +where cboolean2 is not null or cstring1 is not null or ctinyint is not null limit 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: default@text_llap1 +POSTHOOK: Lineage: text_llap1.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: text_llap1.cboolean1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: text_llap1.cboolean2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ] +POSTHOOK: Lineage: text_llap1.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: text_llap1.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: text_llap1.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: text_llap1.csmallint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: text_llap1.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: text_llap1.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ] +POSTHOOK: Lineage: 
text_llap1.ctimestamp1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: text_llap1.ctimestamp2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ] +POSTHOOK: Lineage: text_llap1.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ] +PREHOOK: query: insert into table text_llap100 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2 from alltypesorc +where cboolean2 is not null or cstring1 is not null or ctinyint is not null limit 100 +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: default@text_llap100 +POSTHOOK: query: insert into table text_llap100 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2 from alltypesorc +where cboolean2 is not null or cstring1 is not null or ctinyint is not null limit 100 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: default@text_llap100 +POSTHOOK: Lineage: text_llap100.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: text_llap100.cboolean1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: text_llap100.cboolean2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ] +POSTHOOK: Lineage: text_llap100.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: text_llap100.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: text_llap100.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: text_llap100.csmallint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: text_llap100.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: text_llap100.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ] +POSTHOOK: Lineage: text_llap100.ctimestamp1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: text_llap100.ctimestamp2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ] +POSTHOOK: Lineage: text_llap100.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ] +PREHOOK: query: insert into table text_llap1000 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2 from alltypesorc +where cboolean2 is not null or cstring1 is not null or ctinyint is not null limit 1000 +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: default@text_llap1000 +POSTHOOK: query: insert into table text_llap1000 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2 from alltypesorc +where cboolean2 is not null or cstring1 is not null or ctinyint is not null limit 1000 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: default@text_llap1000 +POSTHOOK: Lineage: text_llap1000.cbigint 
SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: text_llap1000.cboolean1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: text_llap1000.cboolean2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ] +POSTHOOK: Lineage: text_llap1000.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: text_llap1000.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: text_llap1000.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: text_llap1000.csmallint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: text_llap1000.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: text_llap1000.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ] +POSTHOOK: Lineage: text_llap1000.ctimestamp1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: text_llap1000.ctimestamp2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ] +POSTHOOK: Lineage: text_llap1000.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ] +PREHOOK: query: select t, s, ts from text_llap2 order by t, s, ts limit 100 +PREHOOK: type: QUERY +PREHOOK: Input: default@text_llap2 +#### A masked pattern was here #### +POSTHOOK: query: select t, s, ts from text_llap2 order by t, s, ts limit 100 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@text_llap2 +#### A masked pattern was here #### +-2 alice carson 2013-03-01 09:11:58.703074 +-2 alice nixon 2013-03-01 09:11:58.703321 +-2 alice underhill 2013-03-01 09:11:58.703122 +-2 alice underhill 2013-03-01 09:11:58.703127 +-2 alice xylophone 2013-03-01 09:11:58.703105 +-2 bob falkner 2013-03-01 09:11:58.703071 +-2 bob king 2013-03-01 09:11:58.703236 +-2 bob ovid 2013-03-01 09:11:58.703285 +-2 bob van buren 2013-03-01 09:11:58.703218 +-2 bob xylophone 2013-03-01 09:11:58.703219 +-2 calvin xylophone 2013-03-01 09:11:58.703083 +-2 david falkner 2013-03-01 09:11:58.703254 +-2 david laertes 2013-03-01 09:11:58.703076 +-2 david miller 2013-03-01 09:11:58.703238 +-3 alice allen 2013-03-01 09:11:58.703323 +-3 alice davidson 2013-03-01 09:11:58.703226 +-3 alice falkner 2013-03-01 09:11:58.703304 +-3 alice king 2013-03-01 09:11:58.70314 +-3 alice king 2013-03-01 09:11:58.703247 +-3 alice xylophone 2013-03-01 09:11:58.703129 +-3 bob ellison 2013-03-01 09:11:58.703261 +-3 bob falkner 2013-03-01 09:11:58.70328 +-3 bob ichabod 2013-03-01 09:11:58.70324 +-3 bob johnson 2013-03-01 09:11:58.703204 +-3 bob polk 2013-03-01 09:11:58.703128 +-3 bob underhill 2013-03-01 09:11:58.703176 +-3 bob underhill 2013-03-01 09:11:58.703188 +-3 bob van buren 2013-03-01 09:11:58.703199 +-3 calvin ichabod 2013-03-01 09:11:58.703213 +-3 calvin white 2013-03-01 09:11:58.703295 +-3 david carson 2013-03-01 09:11:58.703136 +-3 david falkner 2013-03-01 09:11:58.703305 +-3 david garcia 2013-03-01 09:11:58.70319 +-3 david hernandez 2013-03-01 09:11:58.703252 +-3 ethan steinbeck 2013-03-01 09:11:58.703079 +-3 ethan underhill 2013-03-01 09:11:58.703138 +-3 fred ellison 2013-03-01 09:11:58.703233 +-3 
gabriella brown 2013-03-01 09:11:58.703288 +-3 holly nixon 2013-03-01 09:11:58.703262 +-3 holly polk 2013-03-01 09:11:58.703273 +-3 holly steinbeck 2013-03-01 09:11:58.703242 +-3 holly thompson 2013-03-01 09:11:58.703073 +-3 holly underhill 2013-03-01 09:11:58.703219 +-3 irene ellison 2013-03-01 09:11:58.703092 +-3 irene underhill 2013-03-01 09:11:58.703298 +-3 irene young 2013-03-01 09:11:58.703084 +-3 jessica johnson 2013-03-01 09:11:58.703319 +-3 jessica king 2013-03-01 09:11:58.703279 +-3 jessica miller 2013-03-01 09:11:58.703245 +-3 jessica white 2013-03-01 09:11:58.703199 +-3 katie ichabod 2013-03-01 09:11:58.703139 +-3 luke garcia 2013-03-01 09:11:58.703076 +-3 luke ichabod 2013-03-01 09:11:58.703294 +-3 luke king 2013-03-01 09:11:58.703207 +-3 luke young 2013-03-01 09:11:58.703182 +-3 mike allen 2013-03-01 09:11:58.703292 +-3 mike king 2013-03-01 09:11:58.703214 +-3 mike polk 2013-03-01 09:11:58.70319 +-3 mike white 2013-03-01 09:11:58.703087 +-3 mike xylophone 2013-03-01 09:11:58.703308 +-3 nick nixon 2013-03-01 09:11:58.703083 +-3 nick robinson 2013-03-01 09:11:58.703147 +-3 oscar davidson 2013-03-01 09:11:58.703071 +-3 oscar garcia 2013-03-01 09:11:58.703282 +-3 oscar johnson 2013-03-01 09:11:58.70311 +-3 oscar johnson 2013-03-01 09:11:58.703133 +-3 oscar miller 2013-03-01 09:11:58.70332 +-3 priscilla laertes 2013-03-01 09:11:58.70325 +-3 priscilla quirinius 2013-03-01 09:11:58.703228 +-3 priscilla zipper 2013-03-01 09:11:58.703321 +-3 quinn ellison 2013-03-01 09:11:58.703232 +-3 quinn polk 2013-03-01 09:11:58.703244 +-3 rachel davidson 2013-03-01 09:11:58.703316 +-3 rachel thompson 2013-03-01 09:11:58.703276 +-3 sarah miller 2013-03-01 09:11:58.70316 +-3 sarah robinson 2013-03-01 09:11:58.703288 +-3 sarah xylophone 2013-03-01 09:11:58.703112 +-3 sarah zipper 2013-03-01 09:11:58.703289 +-3 tom hernandez 2013-03-01 09:11:58.703108 +-3 tom hernandez 2013-03-01 09:11:58.703188 +-3 tom polk 2013-03-01 09:11:58.703217 +-3 tom steinbeck 2013-03-01 09:11:58.703251 +-3 ulysses carson 2013-03-01 09:11:58.703253 +-3 ulysses ellison 2013-03-01 09:11:58.703197 +-3 ulysses quirinius 2013-03-01 09:11:58.703189 +-3 ulysses robinson 2013-03-01 09:11:58.703227 +-3 ulysses steinbeck 2013-03-01 09:11:58.703259 +-3 victor allen 2013-03-01 09:11:58.703155 +-3 victor hernandez 2013-03-01 09:11:58.703176 +-3 victor robinson 2013-03-01 09:11:58.703305 +-3 victor thompson 2013-03-01 09:11:58.703299 +-3 victor xylophone 2013-03-01 09:11:58.703135 +-3 wendy quirinius 2013-03-01 09:11:58.703266 +-3 wendy robinson 2013-03-01 09:11:58.703294 +-3 wendy xylophone 2013-03-01 09:11:58.703191 +-3 xavier garcia 2013-03-01 09:11:58.703194 +-3 xavier ovid 2013-03-01 09:11:58.703148 +-3 yuri xylophone 2013-03-01 09:11:58.703258 +-3 zach thompson 2013-03-01 09:11:58.703252 +-3 zach young 2013-03-01 09:11:58.703191 +PREHOOK: query: select * from text_llap2 order by t, s, ts limit 100 +PREHOOK: type: QUERY +PREHOOK: Input: default@text_llap2 +#### A masked pattern was here #### +POSTHOOK: query: select * from text_llap2 order by t, s, ts limit 100 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@text_llap2 +#### A masked pattern was here #### +-2 305 65767 4294967529 76.54 4.72 true calvin xylophone 2013-03-01 09:11:58.703083 69 quiet hour +-2 331 65707 4294967335 67.12 13.51 false bob ovid 2013-03-01 09:11:58.703285 62 joggying +-2 373 65548 4294967423 16.98 43.6 true alice nixon 2013-03-01 09:11:58.703321 53 debate +-2 378 65553 4294967461 9.81 10.36 true bob king 2013-03-01 09:11:58.703236 91 opthamology +-2 389 
65706 4294967488 26.68 17.93 false alice underhill 2013-03-01 09:11:58.703122 87 forestry +-2 389 65738 4294967520 99.45 26.26 true bob falkner 2013-03-01 09:11:58.703071 17 nap time +-2 393 65715 4294967305 48.3 1.85 true alice xylophone 2013-03-01 09:11:58.703105 30 values clariffication +-2 406 65582 4294967311 20.94 35.74 false bob van buren 2013-03-01 09:11:58.703218 25 opthamology +-2 406 65762 4294967443 1.79 33.42 false david falkner 2013-03-01 09:11:58.703254 58 opthamology +-2 407 65612 4294967318 25.48 41.56 true david laertes 2013-03-01 09:11:58.703076 40 forestry +-2 427 65666 4294967465 19.69 33.24 true bob xylophone 2013-03-01 09:11:58.703219 33 joggying +-2 446 65790 4294967302 6.49 10.81 false alice underhill 2013-03-01 09:11:58.703127 44 undecided +-2 450 65727 4294967487 94.57 30.4 false david miller 2013-03-01 09:11:58.703238 40 religion +-2 473 65565 4294967320 87.78 12.26 true alice carson 2013-03-01 09:11:58.703074 90 xylophone band +-3 260 65595 4294967545 59.07 6.75 false bob falkner 2013-03-01 09:11:58.70328 37 chemistry +-3 264 65776 4294967398 20.95 5.97 false bob polk 2013-03-01 09:11:58.703128 93 joggying +-3 266 65736 4294967397 19.94 10.01 false quinn ellison 2013-03-01 09:11:58.703232 89 forestry +-3 268 65710 4294967448 82.74 12.48 true holly polk 2013-03-01 09:11:58.703273 15 undecided +-3 270 65702 4294967512 38.05 1.07 true david carson 2013-03-01 09:11:58.703136 28 philosophy +-3 275 65543 4294967522 74.92 17.29 false mike king 2013-03-01 09:11:58.703214 53 opthamology +-3 275 65575 4294967441 38.22 2.43 true sarah xylophone 2013-03-01 09:11:58.703112 93 wind surfing +-3 275 65622 4294967302 71.78 8.49 false wendy robinson 2013-03-01 09:11:58.703294 95 undecided +-3 279 65661 4294967536 25.5 0.02 false wendy quirinius 2013-03-01 09:11:58.703266 75 undecided +-3 280 65548 4294967350 52.3 33.06 true calvin white 2013-03-01 09:11:58.703295 30 quiet hour +-3 280 65597 4294967377 18.44 49.8 true alice falkner 2013-03-01 09:11:58.703304 74 zync studies +-3 280 65769 4294967324 28.78 35.05 true xavier ovid 2013-03-01 09:11:58.703148 43 kindergarten +-3 284 65566 4294967400 62.81 39.1 false jessica white 2013-03-01 09:11:58.703199 70 opthamology +-3 286 65573 4294967493 18.27 23.71 false zach young 2013-03-01 09:11:58.703191 22 kindergarten +-3 289 65757 4294967528 56.2 44.24 true luke ichabod 2013-03-01 09:11:58.703294 7 yard duty +-3 298 65720 4294967305 34.6 39.7 false ethan steinbeck 2013-03-01 09:11:58.703079 35 kindergarten +-3 299 65763 4294967542 85.96 10.45 true jessica miller 2013-03-01 09:11:58.703245 26 mathematics +-3 303 65617 4294967473 10.26 1.41 false ulysses quirinius 2013-03-01 09:11:58.703189 84 chemistry +-3 307 65634 4294967546 90.3 28.44 false irene underhill 2013-03-01 09:11:58.703298 85 forestry +-3 311 65569 4294967460 3.82 35.45 false luke garcia 2013-03-01 09:11:58.703076 93 chemistry +-3 313 65540 4294967316 25.67 39.88 false ulysses robinson 2013-03-01 09:11:58.703227 61 religion +-3 314 65670 4294967330 13.67 34.86 false wendy xylophone 2013-03-01 09:11:58.703191 85 mathematics +-3 315 65671 4294967412 94.22 25.96 true oscar johnson 2013-03-01 09:11:58.703133 89 nap time +-3 316 65696 4294967445 22.0 43.41 false priscilla laertes 2013-03-01 09:11:58.70325 51 values clariffication +-3 318 65553 4294967452 9.86 32.77 false holly underhill 2013-03-01 09:11:58.703219 47 wind surfing +-3 320 65644 4294967434 84.39 48.0 false sarah robinson 2013-03-01 09:11:58.703288 72 wind surfing +-3 324 65773 4294967296 11.07 25.95 true oscar 
miller 2013-03-01 09:11:58.70332 57 opthamology +-3 333 65562 4294967359 22.34 35.58 false ulysses steinbeck 2013-03-01 09:11:58.703259 87 xylophone band +-3 335 65696 4294967333 72.26 9.66 true nick nixon 2013-03-01 09:11:58.703083 85 philosophy +-3 337 65629 4294967521 55.59 6.54 true luke king 2013-03-01 09:11:58.703207 59 industrial engineering +-3 337 65658 4294967361 43.4 12.05 false victor allen 2013-03-01 09:11:58.703155 45 topology +-3 339 65671 4294967311 8.37 15.98 true bob ellison 2013-03-01 09:11:58.703261 14 linguistics +-3 339 65737 4294967453 14.23 26.66 true ethan underhill 2013-03-01 09:11:58.703138 95 xylophone band +-3 343 65783 4294967378 7.1 18.16 true ulysses carson 2013-03-01 09:11:58.703253 97 mathematics +-3 344 65733 4294967363 0.56 11.96 true rachel thompson 2013-03-01 09:11:58.703276 88 wind surfing +-3 344 65756 4294967378 52.13 18.95 true victor thompson 2013-03-01 09:11:58.703299 81 topology +-3 346 65752 4294967298 56.05 34.03 false tom polk 2013-03-01 09:11:58.703217 49 zync studies +-3 350 65566 4294967434 23.22 6.68 true nick robinson 2013-03-01 09:11:58.703147 24 education +-3 362 65712 4294967325 43.73 48.74 false oscar garcia 2013-03-01 09:11:58.703282 30 chemistry +-3 374 65731 4294967388 22.35 22.71 true bob johnson 2013-03-01 09:11:58.703204 80 biology +-3 376 65548 4294967431 96.78 43.23 false fred ellison 2013-03-01 09:11:58.703233 75 education +-3 376 65766 4294967326 97.88 5.58 true sarah zipper 2013-03-01 09:11:58.703289 49 study skills +-3 381 65640 4294967379 59.34 7.97 false ulysses ellison 2013-03-01 09:11:58.703197 32 undecided +-3 384 65613 4294967470 63.49 45.85 false holly steinbeck 2013-03-01 09:11:58.703242 54 chemistry +-3 384 65676 4294967453 71.97 31.52 false alice davidson 2013-03-01 09:11:58.703226 14 xylophone band +-3 386 65611 4294967331 58.81 22.43 true sarah miller 2013-03-01 09:11:58.70316 75 mathematics +-3 386 65716 4294967496 12.12 2.37 false zach thompson 2013-03-01 09:11:58.703252 16 linguistics +-3 387 65550 4294967355 84.75 22.75 true holly thompson 2013-03-01 09:11:58.703073 52 biology +-3 400 65557 4294967503 76.31 29.44 false alice allen 2013-03-01 09:11:58.703323 19 debate +-3 408 65667 4294967509 81.68 45.9 true david hernandez 2013-03-01 09:11:58.703252 52 topology +-3 414 65608 4294967338 81.39 49.09 true tom steinbeck 2013-03-01 09:11:58.703251 11 xylophone band +-3 415 65571 4294967536 61.81 24.24 true victor robinson 2013-03-01 09:11:58.703305 23 american history +-3 423 65646 4294967378 63.19 34.04 false priscilla quirinius 2013-03-01 09:11:58.703228 35 xylophone band +-3 430 65667 4294967469 65.5 40.46 true yuri xylophone 2013-03-01 09:11:58.703258 31 american history +-3 431 65635 4294967500 29.06 0.34 false calvin ichabod 2013-03-01 09:11:58.703213 29 undecided +-3 432 65646 4294967492 0.83 27.18 true oscar davidson 2013-03-01 09:11:58.703071 56 linguistics +-3 433 65654 4294967455 6.83 5.33 false bob van buren 2013-03-01 09:11:58.703199 29 yard duty +-3 438 65618 4294967398 62.39 4.62 false victor xylophone 2013-03-01 09:11:58.703135 88 values clariffication +-3 447 65755 4294967320 43.69 20.03 false victor hernandez 2013-03-01 09:11:58.703176 14 forestry +-3 448 65610 4294967314 81.97 31.11 true mike xylophone 2013-03-01 09:11:58.703308 79 opthamology +-3 451 65696 4294967532 6.8 40.07 false luke young 2013-03-01 09:11:58.703182 27 biology +-3 454 65627 4294967481 17.6 35.72 false bob underhill 2013-03-01 09:11:58.703188 67 religion +-3 454 65705 4294967468 62.12 14.32 true mike white 2013-03-01 
09:11:58.703087 40 joggying +-3 454 65733 4294967544 73.83 18.42 false bob ichabod 2013-03-01 09:11:58.70324 96 debate +-3 455 65570 4294967304 2.48 30.76 false alice king 2013-03-01 09:11:58.70314 42 forestry +-3 458 65563 4294967315 62.77 41.5 false alice king 2013-03-01 09:11:58.703247 3 mathematics +-3 458 65679 4294967331 64.29 43.8 true irene young 2013-03-01 09:11:58.703084 3 american history +-3 458 65696 4294967418 45.24 8.49 false irene ellison 2013-03-01 09:11:58.703092 54 american history +-3 459 65644 4294967456 92.71 0.08 false jessica king 2013-03-01 09:11:58.703279 53 joggying +-3 465 65551 4294967457 83.39 46.64 true mike allen 2013-03-01 09:11:58.703292 53 values clariffication +-3 465 65735 4294967298 72.3 22.58 false bob underhill 2013-03-01 09:11:58.703176 81 joggying +-3 467 65575 4294967437 81.64 23.53 true tom hernandez 2013-03-01 09:11:58.703188 33 study skills +-3 469 65577 4294967451 88.78 32.96 true katie ichabod 2013-03-01 09:11:58.703139 69 undecided +-3 469 65698 4294967357 47.51 49.22 true david falkner 2013-03-01 09:11:58.703305 78 joggying +-3 469 65752 4294967350 55.41 32.11 true oscar johnson 2013-03-01 09:11:58.70311 47 philosophy +-3 477 65785 4294967464 97.51 10.84 true tom hernandez 2013-03-01 09:11:58.703108 7 history +-3 485 65661 4294967441 26.21 16.19 false alice xylophone 2013-03-01 09:11:58.703129 97 topology +-3 485 65669 4294967428 21.34 13.07 false priscilla zipper 2013-03-01 09:11:58.703321 28 quiet hour +-3 485 65684 4294967483 11.83 8.04 false david garcia 2013-03-01 09:11:58.70319 63 wind surfing +-3 493 65662 4294967482 28.75 30.21 false xavier garcia 2013-03-01 09:11:58.703194 5 education +-3 494 65589 4294967369 48.09 14.4 false jessica johnson 2013-03-01 09:11:58.703319 79 nap time +-3 498 65751 4294967331 80.65 0.28 true gabriella brown 2013-03-01 09:11:58.703288 61 opthamology +-3 500 65704 4294967480 2.26 28.79 true mike polk 2013-03-01 09:11:58.70319 4 nap time +-3 505 65565 4294967407 68.73 4.65 true holly nixon 2013-03-01 09:11:58.703262 15 debate +-3 507 65671 4294967305 60.28 41.5 false quinn polk 2013-03-01 09:11:58.703244 77 industrial engineering +-3 507 65728 4294967525 81.95 47.14 true rachel davidson 2013-03-01 09:11:58.703316 31 study skills +PREHOOK: query: select t, f, s from text_llap2 order by t, s, f limit 100 +PREHOOK: type: QUERY +PREHOOK: Input: default@text_llap2 +#### A masked pattern was here #### +POSTHOOK: query: select t, f, s from text_llap2 order by t, s, f limit 100 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@text_llap2 +#### A masked pattern was here #### +-2 1.79 david falkner +-2 16.98 alice nixon +-2 19.69 bob xylophone +-2 20.94 bob van buren +-2 25.48 david laertes +-2 26.68 alice underhill +-2 48.3 alice xylophone +-2 6.49 alice underhill +-2 67.12 bob ovid +-2 76.54 calvin xylophone +-2 87.78 alice carson +-2 9.81 bob king +-2 94.57 david miller +-2 99.45 bob falkner +-3 0.56 rachel thompson +-3 0.83 oscar davidson +-3 10.26 ulysses quirinius +-3 11.07 oscar miller +-3 11.83 david garcia +-3 12.12 zach thompson +-3 13.67 wendy xylophone +-3 14.23 ethan underhill +-3 17.6 bob underhill +-3 18.27 zach young +-3 18.44 alice falkner +-3 19.94 quinn ellison +-3 2.26 mike polk +-3 2.48 alice king +-3 20.95 bob polk +-3 21.34 priscilla zipper +-3 22.0 priscilla laertes +-3 22.34 ulysses steinbeck +-3 22.35 bob johnson +-3 23.22 nick robinson +-3 25.5 wendy quirinius +-3 25.67 ulysses robinson +-3 26.21 alice xylophone +-3 28.75 xavier garcia +-3 28.78 xavier ovid +-3 29.06 calvin ichabod +-3 3.82 
luke garcia +-3 34.6 ethan steinbeck +-3 38.05 david carson +-3 38.22 sarah xylophone +-3 43.4 victor allen +-3 43.69 victor hernandez +-3 43.73 oscar garcia +-3 45.24 irene ellison +-3 47.51 david falkner +-3 48.09 jessica johnson +-3 52.13 victor thompson +-3 52.3 calvin white +-3 55.41 oscar johnson +-3 55.59 luke king +-3 56.05 tom polk +-3 56.2 luke ichabod +-3 58.81 sarah miller +-3 59.07 bob falkner +-3 59.34 ulysses ellison +-3 6.8 luke young +-3 6.83 bob van buren +-3 60.28 quinn polk +-3 61.81 victor robinson +-3 62.12 mike white +-3 62.39 victor xylophone +-3 62.77 alice king +-3 62.81 jessica white +-3 63.19 priscilla quirinius +-3 63.49 holly steinbeck +-3 64.29 irene young +-3 65.5 yuri xylophone +-3 68.73 holly nixon +-3 7.1 ulysses carson +-3 71.78 wendy robinson +-3 71.97 alice davidson +-3 72.26 nick nixon +-3 72.3 bob underhill +-3 73.83 bob ichabod +-3 74.92 mike king +-3 76.31 alice allen +-3 8.37 bob ellison +-3 80.65 gabriella brown +-3 81.39 tom steinbeck +-3 81.64 tom hernandez +-3 81.68 david hernandez +-3 81.95 rachel davidson +-3 81.97 mike xylophone +-3 82.74 holly polk +-3 83.39 mike allen +-3 84.39 sarah robinson +-3 84.75 holly thompson +-3 85.96 jessica miller +-3 88.78 katie ichabod +-3 9.86 holly underhill +-3 90.3 irene underhill +-3 92.71 jessica king +-3 94.22 oscar johnson +-3 96.78 fred ellison +-3 97.51 tom hernandez +-3 97.88 sarah zipper +PREHOOK: query: select ctinyint, cstring1, cboolean2 from text_llap100 order by ctinyint, cstring1, cboolean2 +PREHOOK: type: QUERY +PREHOOK: Input: default@text_llap100 +#### A masked pattern was here #### +POSTHOOK: query: select ctinyint, cstring1, cboolean2 from text_llap100 order by ctinyint, cstring1, cboolean2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@text_llap100 +#### A masked pattern was here #### +-1 cvLH6Eat2yFsyy7p NULL +-11 cvLH6Eat2yFsyy7p NULL +-11 cvLH6Eat2yFsyy7p NULL +-11 cvLH6Eat2yFsyy7p NULL +-12 cvLH6Eat2yFsyy7p NULL +-13 cvLH6Eat2yFsyy7p NULL +-16 cvLH6Eat2yFsyy7p NULL +-16 cvLH6Eat2yFsyy7p NULL +-19 cvLH6Eat2yFsyy7p NULL +-21 cvLH6Eat2yFsyy7p NULL +-21 cvLH6Eat2yFsyy7p NULL +-22 cvLH6Eat2yFsyy7p NULL +-22 cvLH6Eat2yFsyy7p NULL +-22 cvLH6Eat2yFsyy7p NULL +-23 cvLH6Eat2yFsyy7p NULL +-23 cvLH6Eat2yFsyy7p NULL +-23 cvLH6Eat2yFsyy7p NULL +-24 cvLH6Eat2yFsyy7p NULL +-28 cvLH6Eat2yFsyy7p NULL +-28 cvLH6Eat2yFsyy7p NULL +-30 cvLH6Eat2yFsyy7p NULL +-32 cvLH6Eat2yFsyy7p NULL +-33 cvLH6Eat2yFsyy7p NULL +-33 cvLH6Eat2yFsyy7p NULL +-34 cvLH6Eat2yFsyy7p NULL +-34 cvLH6Eat2yFsyy7p NULL +-36 cvLH6Eat2yFsyy7p NULL +-37 cvLH6Eat2yFsyy7p NULL +-4 cvLH6Eat2yFsyy7p NULL +-4 cvLH6Eat2yFsyy7p NULL +-40 cvLH6Eat2yFsyy7p NULL +-43 cvLH6Eat2yFsyy7p NULL +-44 cvLH6Eat2yFsyy7p NULL +-45 cvLH6Eat2yFsyy7p NULL +-45 cvLH6Eat2yFsyy7p NULL +-47 cvLH6Eat2yFsyy7p NULL +-48 cvLH6Eat2yFsyy7p NULL +-48 cvLH6Eat2yFsyy7p NULL +-5 cvLH6Eat2yFsyy7p NULL +-5 cvLH6Eat2yFsyy7p NULL +-5 cvLH6Eat2yFsyy7p NULL +-50 cvLH6Eat2yFsyy7p NULL +-51 cvLH6Eat2yFsyy7p NULL +-53 cvLH6Eat2yFsyy7p NULL +-54 cvLH6Eat2yFsyy7p NULL +-55 cvLH6Eat2yFsyy7p NULL +-55 cvLH6Eat2yFsyy7p NULL +-56 cvLH6Eat2yFsyy7p NULL +-56 cvLH6Eat2yFsyy7p NULL +-57 cvLH6Eat2yFsyy7p NULL +-59 cvLH6Eat2yFsyy7p NULL +-62 cvLH6Eat2yFsyy7p NULL +-7 cvLH6Eat2yFsyy7p NULL +0 cvLH6Eat2yFsyy7p NULL +0 cvLH6Eat2yFsyy7p NULL +10 cvLH6Eat2yFsyy7p NULL +13 cvLH6Eat2yFsyy7p NULL +16 cvLH6Eat2yFsyy7p NULL +18 cvLH6Eat2yFsyy7p NULL +19 cvLH6Eat2yFsyy7p NULL +2 cvLH6Eat2yFsyy7p NULL +21 cvLH6Eat2yFsyy7p NULL +24 cvLH6Eat2yFsyy7p NULL +24 cvLH6Eat2yFsyy7p NULL +26 cvLH6Eat2yFsyy7p NULL 
+27 cvLH6Eat2yFsyy7p NULL +27 cvLH6Eat2yFsyy7p NULL +28 cvLH6Eat2yFsyy7p NULL +29 cvLH6Eat2yFsyy7p NULL +29 cvLH6Eat2yFsyy7p NULL +30 cvLH6Eat2yFsyy7p NULL +31 cvLH6Eat2yFsyy7p NULL +31 cvLH6Eat2yFsyy7p NULL +34 cvLH6Eat2yFsyy7p NULL +34 cvLH6Eat2yFsyy7p NULL +36 cvLH6Eat2yFsyy7p NULL +36 cvLH6Eat2yFsyy7p NULL +38 cvLH6Eat2yFsyy7p NULL +38 cvLH6Eat2yFsyy7p NULL +38 cvLH6Eat2yFsyy7p NULL +39 cvLH6Eat2yFsyy7p NULL +4 cvLH6Eat2yFsyy7p NULL +40 cvLH6Eat2yFsyy7p NULL +40 cvLH6Eat2yFsyy7p NULL +41 cvLH6Eat2yFsyy7p NULL +43 cvLH6Eat2yFsyy7p NULL +46 cvLH6Eat2yFsyy7p NULL +5 cvLH6Eat2yFsyy7p NULL +51 cvLH6Eat2yFsyy7p NULL +51 cvLH6Eat2yFsyy7p NULL +53 cvLH6Eat2yFsyy7p NULL +53 cvLH6Eat2yFsyy7p NULL +61 cvLH6Eat2yFsyy7p NULL +61 cvLH6Eat2yFsyy7p NULL +61 cvLH6Eat2yFsyy7p NULL +62 cvLH6Eat2yFsyy7p NULL +8 cvLH6Eat2yFsyy7p NULL +9 cvLH6Eat2yFsyy7p NULL +NULL cvLH6Eat2yFsyy7p NULL +NULL cvLH6Eat2yFsyy7p NULL +PREHOOK: query: select * from text_llap100 order by cint, cstring1, cstring2 +PREHOOK: type: QUERY +PREHOOK: Input: default@text_llap100 +#### A masked pattern was here #### +POSTHOOK: query: select * from text_llap100 order by cint, cstring1, cstring2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@text_llap100 +#### A masked pattern was here #### +-1 -75 528534767 NULL -1.389 -863.257 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:49.331 1969-12-31 16:00:07.585 true NULL +-11 -15431 528534767 NULL -11.0 -15431.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.176 1969-12-31 16:00:07.787 true NULL +-11 7476 528534767 NULL -11.0 7476.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.551 1969-12-31 15:59:57.567 true NULL +-11 9472 528534767 NULL -11.0 9472.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:47.917 1969-12-31 16:00:03.716 true NULL +-12 -2013 528534767 NULL -12.0 -2013.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:14.907 1969-12-31 15:59:58.789 true NULL +-13 -13372 528534767 NULL -13.0 -13372.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:08.499 1969-12-31 15:59:48.221 true NULL +-16 -6922 528534767 NULL -16.0 -6922.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:08.402 1969-12-31 15:59:50.561 true NULL +-16 -7964 528534767 NULL -16.0 -7964.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:08.035 1969-12-31 16:00:12.464 true NULL +-19 1206 528534767 NULL -19.0 1206.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:04.587 1969-12-31 16:00:08.381 true NULL +-21 -7183 528534767 NULL -21.0 -7183.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:45.035 1969-12-31 16:00:06.182 true NULL +-21 3168 528534767 NULL -21.0 3168.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:56.834 1969-12-31 16:00:13.331 true NULL +-22 3856 528534767 NULL -22.0 3856.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:48.508 1969-12-31 15:59:54.534 true NULL +-22 77 528534767 NULL -22.0 77.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:45.928 1969-12-31 15:59:43.621 true NULL +-22 8499 528534767 NULL -22.0 8499.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:15.626 1969-12-31 16:00:10.923 true NULL +-23 -10154 528534767 NULL -23.0 -10154.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:44.088 1969-12-31 15:59:56.086 true NULL +-23 13026 528534767 NULL -23.0 13026.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:14.625 1969-12-31 16:00:10.77 true NULL +-23 4587 528534767 NULL -23.0 4587.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:49.732 1969-12-31 15:59:48.52 true NULL +-24 163 528534767 NULL -24.0 163.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:55.51 1969-12-31 16:00:04.014 true NULL +-28 -15813 528534767 NULL -28.0 -15813.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:55.787 1969-12-31 16:00:01.546 true NULL +-28 6453 528534767 NULL -28.0 
6453.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:01.475 1969-12-31 16:00:07.828 true NULL +-30 834 528534767 NULL -30.0 834.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:14.072 1969-12-31 16:00:03.004 true NULL +-32 11242 528534767 NULL -32.0 11242.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:49.091 1969-12-31 15:59:55.681 true NULL +-33 14072 528534767 NULL -33.0 14072.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:03.168 1969-12-31 15:59:55.836 true NULL +-33 7350 528534767 NULL -33.0 7350.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:07.952 1969-12-31 15:59:48.183 true NULL +-34 15007 528534767 NULL -34.0 15007.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:50.434 1969-12-31 16:00:13.352 true NULL +-34 4181 528534767 NULL -34.0 4181.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:06.557 1969-12-31 16:00:04.869 true NULL +-36 1639 528534767 NULL -36.0 1639.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:00.186 1969-12-31 16:00:13.098 true NULL +-37 -12472 528534767 NULL -37.0 -12472.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:13.3 1969-12-31 15:59:55.998 true NULL +-4 -1027 528534767 NULL -4.0 -1027.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:46.628 1969-12-31 16:00:11.413 true NULL +-4 2617 528534767 NULL -4.0 2617.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:44.21 1969-12-31 15:59:44.733 true NULL +-40 -4463 528534767 NULL -40.0 -4463.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.647 1969-12-31 15:59:46.254 true NULL +-43 486 528534767 NULL -43.0 486.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:01.345 1969-12-31 15:59:52.667 true NULL +-44 -1299 528534767 NULL -44.0 -1299.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:00.163 1969-12-31 15:59:47.687 true NULL +-45 -14072 528534767 NULL -45.0 -14072.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:45.621 1969-12-31 15:59:45.914 true NULL +-45 5521 528534767 NULL -45.0 5521.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:00.01 1969-12-31 15:59:48.553 true NULL +-47 -2468 528534767 NULL -47.0 -2468.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:48.68 1969-12-31 16:00:02.94 true NULL +-48 -7735 528534767 NULL -48.0 -7735.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:09.472 1969-12-31 16:00:00.8 true NULL +-48 13300 528534767 NULL -48.0 13300.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:00.077 1969-12-31 15:59:45.827 true NULL +-5 -13229 528534767 NULL -5.0 -13229.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:00.834 1969-12-31 16:00:00.388 true NULL +-5 -14379 528534767 NULL -5.0 -14379.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:00.037 1969-12-31 15:59:49.141 true NULL +-5 12422 528534767 NULL -5.0 12422.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:55.745 1969-12-31 15:59:48.802 true NULL +-50 -13326 528534767 NULL -50.0 -13326.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:46.674 1969-12-31 16:00:08.875 true NULL +-51 -12083 528534767 NULL -51.0 -12083.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:13.026 1969-12-31 16:00:02.52 true NULL +-53 -3419 528534767 NULL -53.0 -3419.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:46.771 1969-12-31 15:59:53.744 true NULL +-54 -10268 528534767 NULL -54.0 -10268.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:53.417 1969-12-31 16:00:00.687 true NULL +-55 -7353 528534767 NULL -55.0 -7353.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:44.941 1969-12-31 15:59:54.268 true NULL +-55 -7449 528534767 NULL -55.0 -7449.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:49.846 1969-12-31 15:59:55.75 true NULL +-56 8353 528534767 NULL -56.0 8353.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:11.242 1969-12-31 15:59:46.526 true NULL +-56 8402 528534767 NULL -56.0 8402.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.01 1969-12-31 16:00:05.146 true NULL +-57 -11492 
528534767 NULL -57.0 -11492.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:45.261 1969-12-31 16:00:05.306 true NULL +-59 10688 528534767 NULL -59.0 10688.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.746 1969-12-31 16:00:15.489 true NULL +-62 10 528534767 NULL -62.0 10.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.265 1969-12-31 15:59:56.584 true NULL +-7 2541 528534767 NULL -7.0 2541.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:08.353 1969-12-31 15:59:57.374 true NULL +0 -3166 528534767 NULL 0.0 -3166.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:10.688 1969-12-31 16:00:01.385 true NULL +0 15626 528534767 NULL 0.0 15626.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:09.566 1969-12-31 16:00:15.217 true NULL +10 9366 528534767 NULL 10.0 9366.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:01.358 1969-12-31 15:59:50.592 true NULL +13 1358 528534767 NULL 13.0 1358.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:06.453 1969-12-31 16:00:00.423 true NULL +16 5780 528534767 NULL 16.0 5780.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:44.451 1969-12-31 16:00:12.752 true NULL +18 -3045 528534767 NULL 18.0 -3045.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:47.829 1969-12-31 16:00:05.045 true NULL +19 7952 528534767 NULL 19.0 7952.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:12.161 1969-12-31 16:00:00.95 true NULL +2 1345 528534767 NULL 2.0 1345.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:55.333 1969-12-31 16:00:00.517 true NULL +21 11737 528534767 NULL 21.0 11737.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:55.537 1969-12-31 15:59:45.022 true NULL +24 -4812 528534767 NULL 24.0 -4812.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:07.86 1969-12-31 15:59:55 true NULL +24 4432 528534767 NULL 24.0 4432.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:02.541 1969-12-31 16:00:10.895 true NULL +26 3961 528534767 NULL 26.0 3961.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:57.987 1969-12-31 15:59:52.232 true NULL +27 -14965 528534767 NULL 27.0 -14965.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:12.422 1969-12-31 16:00:09.517 true NULL +27 -7824 528534767 NULL 27.0 -7824.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:04.963 1969-12-31 15:59:56.474 true NULL +28 8035 528534767 NULL 28.0 8035.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:03.856 1969-12-31 15:59:55.95 true NULL +29 -1990 528534767 NULL 29.0 -1990.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:06.958 1969-12-31 15:59:52.902 true NULL +29 7021 528534767 NULL 29.0 7021.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:15.007 1969-12-31 16:00:15.148 true NULL +30 -814 528534767 NULL 30.0 -814.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:56.955 1969-12-31 16:00:11.799 true NULL +31 -9566 528534767 NULL 31.0 -9566.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:44.187 1969-12-31 16:00:06.961 true NULL +31 4963 528534767 NULL 31.0 4963.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:07.021 1969-12-31 16:00:02.997 true NULL +34 -15059 528534767 NULL 34.0 -15059.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:01.639 1969-12-31 16:00:13.206 true NULL +34 -4255 528534767 NULL 34.0 -4255.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:56.581 1969-12-31 15:59:57.88 true NULL +36 -15912 528534767 NULL 36.0 -15912.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:04.432 1969-12-31 16:00:04.376 true NULL +36 14907 528534767 NULL 36.0 14907.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:47.528 1969-12-31 15:59:47.206 true NULL +38 -11320 528534767 NULL 38.0 -11320.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:09.169 1969-12-31 16:00:03.822 true NULL +38 -4667 528534767 NULL 38.0 -4667.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:09.366 1969-12-31 15:59:52.334 true NULL +38 -6583 528534767 NULL 38.0 -6583.0 cvLH6Eat2yFsyy7p 
NULL 1969-12-31 15:59:53.078 1969-12-31 16:00:06.722 true NULL +39 -10909 528534767 NULL 39.0 -10909.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.276 1969-12-31 16:00:12.738 true NULL +4 -14739 528534767 NULL 4.0 -14739.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:55.188 1969-12-31 16:00:15.26 true NULL +40 -1724 528534767 NULL 40.0 -1724.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:05.521 1969-12-31 15:59:57.835 true NULL +40 -7984 528534767 NULL 40.0 -7984.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:01.206 1969-12-31 16:00:02.59 true NULL +41 37 528534767 NULL 41.0 37.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.817 1969-12-31 15:59:53.672 true NULL +43 1475 528534767 NULL 43.0 1475.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:56.988 1969-12-31 16:00:03.442 true NULL +46 6958 528534767 NULL 46.0 6958.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.036 1969-12-31 16:00:10.191 true NULL +5 14625 528534767 NULL 5.0 14625.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:05.78 1969-12-31 16:00:15.34 true NULL +51 -15790 528534767 NULL 51.0 -15790.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:49.871 1969-12-31 15:59:57.821 true NULL +51 -4490 528534767 NULL 51.0 -4490.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:07.476 1969-12-31 15:59:49.318 true NULL +53 -10129 528534767 NULL 53.0 -10129.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:04.181 1969-12-31 16:00:08.061 true NULL +53 -12171 528534767 NULL 53.0 -12171.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:07.35 1969-12-31 15:59:57.549 true NULL +61 -1254 528534767 NULL 61.0 -1254.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:11.737 1969-12-31 16:00:12.004 true NULL +61 -15549 528534767 NULL 61.0 -15549.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:44.569 1969-12-31 15:59:51.665 true NULL +61 12161 528534767 NULL 61.0 12161.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:02.617 1969-12-31 16:00:10.536 true NULL +62 6557 528534767 NULL 62.0 6557.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.016 1969-12-31 16:00:00.367 true NULL +8 7860 528534767 NULL 8.0 7860.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.701 1969-12-31 16:00:01.97 true NULL +9 9169 528534767 NULL 9.0 9169.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:03.961 1969-12-31 16:00:14.126 true NULL +NULL -3012 528534767 NULL NULL -3012.0 cvLH6Eat2yFsyy7p NULL NULL 1969-12-31 16:00:03.756 true NULL +NULL -4213 528534767 NULL NULL -4213.0 cvLH6Eat2yFsyy7p NULL NULL 1969-12-31 16:00:13.589 true NULL +PREHOOK: query: select csmallint, cstring1, cboolean2 from text_llap100 order by csmallint, cstring1, cboolean2 +PREHOOK: type: QUERY +PREHOOK: Input: default@text_llap100 +#### A masked pattern was here #### +POSTHOOK: query: select csmallint, cstring1, cboolean2 from text_llap100 order by csmallint, cstring1, cboolean2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@text_llap100 +#### A masked pattern was here #### +-10129 cvLH6Eat2yFsyy7p NULL +-10154 cvLH6Eat2yFsyy7p NULL +-10268 cvLH6Eat2yFsyy7p NULL +-1027 cvLH6Eat2yFsyy7p NULL +-10909 cvLH6Eat2yFsyy7p NULL +-11320 cvLH6Eat2yFsyy7p NULL +-11492 cvLH6Eat2yFsyy7p NULL +-12083 cvLH6Eat2yFsyy7p NULL +-12171 cvLH6Eat2yFsyy7p NULL +-12472 cvLH6Eat2yFsyy7p NULL +-1254 cvLH6Eat2yFsyy7p NULL +-1299 cvLH6Eat2yFsyy7p NULL +-13229 cvLH6Eat2yFsyy7p NULL +-13326 cvLH6Eat2yFsyy7p NULL +-13372 cvLH6Eat2yFsyy7p NULL +-14072 cvLH6Eat2yFsyy7p NULL +-14379 cvLH6Eat2yFsyy7p NULL +-14739 cvLH6Eat2yFsyy7p NULL +-14965 cvLH6Eat2yFsyy7p NULL +-15059 cvLH6Eat2yFsyy7p NULL +-15431 cvLH6Eat2yFsyy7p NULL +-15549 cvLH6Eat2yFsyy7p NULL +-15790 cvLH6Eat2yFsyy7p NULL +-15813 cvLH6Eat2yFsyy7p NULL +-15912 cvLH6Eat2yFsyy7p NULL +-1724 
cvLH6Eat2yFsyy7p NULL +-1990 cvLH6Eat2yFsyy7p NULL +-2013 cvLH6Eat2yFsyy7p NULL +-2468 cvLH6Eat2yFsyy7p NULL +-3012 cvLH6Eat2yFsyy7p NULL +-3045 cvLH6Eat2yFsyy7p NULL +-3166 cvLH6Eat2yFsyy7p NULL +-3419 cvLH6Eat2yFsyy7p NULL +-4213 cvLH6Eat2yFsyy7p NULL +-4255 cvLH6Eat2yFsyy7p NULL +-4463 cvLH6Eat2yFsyy7p NULL +-4490 cvLH6Eat2yFsyy7p NULL +-4667 cvLH6Eat2yFsyy7p NULL +-4812 cvLH6Eat2yFsyy7p NULL +-6583 cvLH6Eat2yFsyy7p NULL +-6922 cvLH6Eat2yFsyy7p NULL +-7183 cvLH6Eat2yFsyy7p NULL +-7353 cvLH6Eat2yFsyy7p NULL +-7449 cvLH6Eat2yFsyy7p NULL +-75 cvLH6Eat2yFsyy7p NULL +-7735 cvLH6Eat2yFsyy7p NULL +-7824 cvLH6Eat2yFsyy7p NULL +-7964 cvLH6Eat2yFsyy7p NULL +-7984 cvLH6Eat2yFsyy7p NULL +-814 cvLH6Eat2yFsyy7p NULL +-9566 cvLH6Eat2yFsyy7p NULL +10 cvLH6Eat2yFsyy7p NULL +10688 cvLH6Eat2yFsyy7p NULL +11242 cvLH6Eat2yFsyy7p NULL +11737 cvLH6Eat2yFsyy7p NULL +1206 cvLH6Eat2yFsyy7p NULL +12161 cvLH6Eat2yFsyy7p NULL +12422 cvLH6Eat2yFsyy7p NULL +13026 cvLH6Eat2yFsyy7p NULL +13300 cvLH6Eat2yFsyy7p NULL +1345 cvLH6Eat2yFsyy7p NULL +1358 cvLH6Eat2yFsyy7p NULL +14072 cvLH6Eat2yFsyy7p NULL +14625 cvLH6Eat2yFsyy7p NULL +1475 cvLH6Eat2yFsyy7p NULL +14907 cvLH6Eat2yFsyy7p NULL +15007 cvLH6Eat2yFsyy7p NULL +15626 cvLH6Eat2yFsyy7p NULL +163 cvLH6Eat2yFsyy7p NULL +1639 cvLH6Eat2yFsyy7p NULL +2541 cvLH6Eat2yFsyy7p NULL +2617 cvLH6Eat2yFsyy7p NULL +3168 cvLH6Eat2yFsyy7p NULL +37 cvLH6Eat2yFsyy7p NULL +3856 cvLH6Eat2yFsyy7p NULL +3961 cvLH6Eat2yFsyy7p NULL +4181 cvLH6Eat2yFsyy7p NULL +4432 cvLH6Eat2yFsyy7p NULL +4587 cvLH6Eat2yFsyy7p NULL +486 cvLH6Eat2yFsyy7p NULL +4963 cvLH6Eat2yFsyy7p NULL +5521 cvLH6Eat2yFsyy7p NULL +5780 cvLH6Eat2yFsyy7p NULL +6453 cvLH6Eat2yFsyy7p NULL +6557 cvLH6Eat2yFsyy7p NULL +6958 cvLH6Eat2yFsyy7p NULL +7021 cvLH6Eat2yFsyy7p NULL +7350 cvLH6Eat2yFsyy7p NULL +7476 cvLH6Eat2yFsyy7p NULL +77 cvLH6Eat2yFsyy7p NULL +7860 cvLH6Eat2yFsyy7p NULL +7952 cvLH6Eat2yFsyy7p NULL +8035 cvLH6Eat2yFsyy7p NULL +834 cvLH6Eat2yFsyy7p NULL +8353 cvLH6Eat2yFsyy7p NULL +8402 cvLH6Eat2yFsyy7p NULL +8499 cvLH6Eat2yFsyy7p NULL +9169 cvLH6Eat2yFsyy7p NULL +9366 cvLH6Eat2yFsyy7p NULL +9472 cvLH6Eat2yFsyy7p NULL +PREHOOK: query: select t, s, ts from text_llap2 order by t, s, ts limit 100 +PREHOOK: type: QUERY +PREHOOK: Input: default@text_llap2 +#### A masked pattern was here #### +POSTHOOK: query: select t, s, ts from text_llap2 order by t, s, ts limit 100 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@text_llap2 +#### A masked pattern was here #### +-2 alice carson 2013-03-01 09:11:58.703074 +-2 alice nixon 2013-03-01 09:11:58.703321 +-2 alice underhill 2013-03-01 09:11:58.703122 +-2 alice underhill 2013-03-01 09:11:58.703127 +-2 alice xylophone 2013-03-01 09:11:58.703105 +-2 bob falkner 2013-03-01 09:11:58.703071 +-2 bob king 2013-03-01 09:11:58.703236 +-2 bob ovid 2013-03-01 09:11:58.703285 +-2 bob van buren 2013-03-01 09:11:58.703218 +-2 bob xylophone 2013-03-01 09:11:58.703219 +-2 calvin xylophone 2013-03-01 09:11:58.703083 +-2 david falkner 2013-03-01 09:11:58.703254 +-2 david laertes 2013-03-01 09:11:58.703076 +-2 david miller 2013-03-01 09:11:58.703238 +-3 alice allen 2013-03-01 09:11:58.703323 +-3 alice davidson 2013-03-01 09:11:58.703226 +-3 alice falkner 2013-03-01 09:11:58.703304 +-3 alice king 2013-03-01 09:11:58.70314 +-3 alice king 2013-03-01 09:11:58.703247 +-3 alice xylophone 2013-03-01 09:11:58.703129 +-3 bob ellison 2013-03-01 09:11:58.703261 +-3 bob falkner 2013-03-01 09:11:58.70328 +-3 bob ichabod 2013-03-01 09:11:58.70324 +-3 bob johnson 2013-03-01 09:11:58.703204 +-3 bob polk 2013-03-01 
09:11:58.703128 +-3 bob underhill 2013-03-01 09:11:58.703176 +-3 bob underhill 2013-03-01 09:11:58.703188 +-3 bob van buren 2013-03-01 09:11:58.703199 +-3 calvin ichabod 2013-03-01 09:11:58.703213 +-3 calvin white 2013-03-01 09:11:58.703295 +-3 david carson 2013-03-01 09:11:58.703136 +-3 david falkner 2013-03-01 09:11:58.703305 +-3 david garcia 2013-03-01 09:11:58.70319 +-3 david hernandez 2013-03-01 09:11:58.703252 +-3 ethan steinbeck 2013-03-01 09:11:58.703079 +-3 ethan underhill 2013-03-01 09:11:58.703138 +-3 fred ellison 2013-03-01 09:11:58.703233 +-3 gabriella brown 2013-03-01 09:11:58.703288 +-3 holly nixon 2013-03-01 09:11:58.703262 +-3 holly polk 2013-03-01 09:11:58.703273 +-3 holly steinbeck 2013-03-01 09:11:58.703242 +-3 holly thompson 2013-03-01 09:11:58.703073 +-3 holly underhill 2013-03-01 09:11:58.703219 +-3 irene ellison 2013-03-01 09:11:58.703092 +-3 irene underhill 2013-03-01 09:11:58.703298 +-3 irene young 2013-03-01 09:11:58.703084 +-3 jessica johnson 2013-03-01 09:11:58.703319 +-3 jessica king 2013-03-01 09:11:58.703279 +-3 jessica miller 2013-03-01 09:11:58.703245 +-3 jessica white 2013-03-01 09:11:58.703199 +-3 katie ichabod 2013-03-01 09:11:58.703139 +-3 luke garcia 2013-03-01 09:11:58.703076 +-3 luke ichabod 2013-03-01 09:11:58.703294 +-3 luke king 2013-03-01 09:11:58.703207 +-3 luke young 2013-03-01 09:11:58.703182 +-3 mike allen 2013-03-01 09:11:58.703292 +-3 mike king 2013-03-01 09:11:58.703214 +-3 mike polk 2013-03-01 09:11:58.70319 +-3 mike white 2013-03-01 09:11:58.703087 +-3 mike xylophone 2013-03-01 09:11:58.703308 +-3 nick nixon 2013-03-01 09:11:58.703083 +-3 nick robinson 2013-03-01 09:11:58.703147 +-3 oscar davidson 2013-03-01 09:11:58.703071 +-3 oscar garcia 2013-03-01 09:11:58.703282 +-3 oscar johnson 2013-03-01 09:11:58.70311 +-3 oscar johnson 2013-03-01 09:11:58.703133 +-3 oscar miller 2013-03-01 09:11:58.70332 +-3 priscilla laertes 2013-03-01 09:11:58.70325 +-3 priscilla quirinius 2013-03-01 09:11:58.703228 +-3 priscilla zipper 2013-03-01 09:11:58.703321 +-3 quinn ellison 2013-03-01 09:11:58.703232 +-3 quinn polk 2013-03-01 09:11:58.703244 +-3 rachel davidson 2013-03-01 09:11:58.703316 +-3 rachel thompson 2013-03-01 09:11:58.703276 +-3 sarah miller 2013-03-01 09:11:58.70316 +-3 sarah robinson 2013-03-01 09:11:58.703288 +-3 sarah xylophone 2013-03-01 09:11:58.703112 +-3 sarah zipper 2013-03-01 09:11:58.703289 +-3 tom hernandez 2013-03-01 09:11:58.703108 +-3 tom hernandez 2013-03-01 09:11:58.703188 +-3 tom polk 2013-03-01 09:11:58.703217 +-3 tom steinbeck 2013-03-01 09:11:58.703251 +-3 ulysses carson 2013-03-01 09:11:58.703253 +-3 ulysses ellison 2013-03-01 09:11:58.703197 +-3 ulysses quirinius 2013-03-01 09:11:58.703189 +-3 ulysses robinson 2013-03-01 09:11:58.703227 +-3 ulysses steinbeck 2013-03-01 09:11:58.703259 +-3 victor allen 2013-03-01 09:11:58.703155 +-3 victor hernandez 2013-03-01 09:11:58.703176 +-3 victor robinson 2013-03-01 09:11:58.703305 +-3 victor thompson 2013-03-01 09:11:58.703299 +-3 victor xylophone 2013-03-01 09:11:58.703135 +-3 wendy quirinius 2013-03-01 09:11:58.703266 +-3 wendy robinson 2013-03-01 09:11:58.703294 +-3 wendy xylophone 2013-03-01 09:11:58.703191 +-3 xavier garcia 2013-03-01 09:11:58.703194 +-3 xavier ovid 2013-03-01 09:11:58.703148 +-3 yuri xylophone 2013-03-01 09:11:58.703258 +-3 zach thompson 2013-03-01 09:11:58.703252 +-3 zach young 2013-03-01 09:11:58.703191 +PREHOOK: query: select csmallint, cstring1, cboolean2 from text_llap100 order by csmallint, cstring1, cboolean2 +PREHOOK: type: QUERY +PREHOOK: Input: 
default@text_llap100 +#### A masked pattern was here #### +POSTHOOK: query: select csmallint, cstring1, cboolean2 from text_llap100 order by csmallint, cstring1, cboolean2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@text_llap100 +#### A masked pattern was here #### +-10129 cvLH6Eat2yFsyy7p NULL +-10154 cvLH6Eat2yFsyy7p NULL +-10268 cvLH6Eat2yFsyy7p NULL +-1027 cvLH6Eat2yFsyy7p NULL +-10909 cvLH6Eat2yFsyy7p NULL +-11320 cvLH6Eat2yFsyy7p NULL +-11492 cvLH6Eat2yFsyy7p NULL +-12083 cvLH6Eat2yFsyy7p NULL +-12171 cvLH6Eat2yFsyy7p NULL +-12472 cvLH6Eat2yFsyy7p NULL +-1254 cvLH6Eat2yFsyy7p NULL +-1299 cvLH6Eat2yFsyy7p NULL +-13229 cvLH6Eat2yFsyy7p NULL +-13326 cvLH6Eat2yFsyy7p NULL +-13372 cvLH6Eat2yFsyy7p NULL +-14072 cvLH6Eat2yFsyy7p NULL +-14379 cvLH6Eat2yFsyy7p NULL +-14739 cvLH6Eat2yFsyy7p NULL +-14965 cvLH6Eat2yFsyy7p NULL +-15059 cvLH6Eat2yFsyy7p NULL +-15431 cvLH6Eat2yFsyy7p NULL +-15549 cvLH6Eat2yFsyy7p NULL +-15790 cvLH6Eat2yFsyy7p NULL +-15813 cvLH6Eat2yFsyy7p NULL +-15912 cvLH6Eat2yFsyy7p NULL +-1724 cvLH6Eat2yFsyy7p NULL +-1990 cvLH6Eat2yFsyy7p NULL +-2013 cvLH6Eat2yFsyy7p NULL +-2468 cvLH6Eat2yFsyy7p NULL +-3012 cvLH6Eat2yFsyy7p NULL +-3045 cvLH6Eat2yFsyy7p NULL +-3166 cvLH6Eat2yFsyy7p NULL +-3419 cvLH6Eat2yFsyy7p NULL +-4213 cvLH6Eat2yFsyy7p NULL +-4255 cvLH6Eat2yFsyy7p NULL +-4463 cvLH6Eat2yFsyy7p NULL +-4490 cvLH6Eat2yFsyy7p NULL +-4667 cvLH6Eat2yFsyy7p NULL +-4812 cvLH6Eat2yFsyy7p NULL +-6583 cvLH6Eat2yFsyy7p NULL +-6922 cvLH6Eat2yFsyy7p NULL +-7183 cvLH6Eat2yFsyy7p NULL +-7353 cvLH6Eat2yFsyy7p NULL +-7449 cvLH6Eat2yFsyy7p NULL +-75 cvLH6Eat2yFsyy7p NULL +-7735 cvLH6Eat2yFsyy7p NULL +-7824 cvLH6Eat2yFsyy7p NULL +-7964 cvLH6Eat2yFsyy7p NULL +-7984 cvLH6Eat2yFsyy7p NULL +-814 cvLH6Eat2yFsyy7p NULL +-9566 cvLH6Eat2yFsyy7p NULL +10 cvLH6Eat2yFsyy7p NULL +10688 cvLH6Eat2yFsyy7p NULL +11242 cvLH6Eat2yFsyy7p NULL +11737 cvLH6Eat2yFsyy7p NULL +1206 cvLH6Eat2yFsyy7p NULL +12161 cvLH6Eat2yFsyy7p NULL +12422 cvLH6Eat2yFsyy7p NULL +13026 cvLH6Eat2yFsyy7p NULL +13300 cvLH6Eat2yFsyy7p NULL +1345 cvLH6Eat2yFsyy7p NULL +1358 cvLH6Eat2yFsyy7p NULL +14072 cvLH6Eat2yFsyy7p NULL +14625 cvLH6Eat2yFsyy7p NULL +1475 cvLH6Eat2yFsyy7p NULL +14907 cvLH6Eat2yFsyy7p NULL +15007 cvLH6Eat2yFsyy7p NULL +15626 cvLH6Eat2yFsyy7p NULL +163 cvLH6Eat2yFsyy7p NULL +1639 cvLH6Eat2yFsyy7p NULL +2541 cvLH6Eat2yFsyy7p NULL +2617 cvLH6Eat2yFsyy7p NULL +3168 cvLH6Eat2yFsyy7p NULL +37 cvLH6Eat2yFsyy7p NULL +3856 cvLH6Eat2yFsyy7p NULL +3961 cvLH6Eat2yFsyy7p NULL +4181 cvLH6Eat2yFsyy7p NULL +4432 cvLH6Eat2yFsyy7p NULL +4587 cvLH6Eat2yFsyy7p NULL +486 cvLH6Eat2yFsyy7p NULL +4963 cvLH6Eat2yFsyy7p NULL +5521 cvLH6Eat2yFsyy7p NULL +5780 cvLH6Eat2yFsyy7p NULL +6453 cvLH6Eat2yFsyy7p NULL +6557 cvLH6Eat2yFsyy7p NULL +6958 cvLH6Eat2yFsyy7p NULL +7021 cvLH6Eat2yFsyy7p NULL +7350 cvLH6Eat2yFsyy7p NULL +7476 cvLH6Eat2yFsyy7p NULL +77 cvLH6Eat2yFsyy7p NULL +7860 cvLH6Eat2yFsyy7p NULL +7952 cvLH6Eat2yFsyy7p NULL +8035 cvLH6Eat2yFsyy7p NULL +834 cvLH6Eat2yFsyy7p NULL +8353 cvLH6Eat2yFsyy7p NULL +8402 cvLH6Eat2yFsyy7p NULL +8499 cvLH6Eat2yFsyy7p NULL +9169 cvLH6Eat2yFsyy7p NULL +9366 cvLH6Eat2yFsyy7p NULL +9472 cvLH6Eat2yFsyy7p NULL +PREHOOK: query: DROP TABLE text_llap +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@text_llap +PREHOOK: Output: default@text_llap +POSTHOOK: query: DROP TABLE text_llap +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@text_llap +POSTHOOK: Output: default@text_llap diff --git a/ql/src/test/results/clientpositive/llap/llap_uncompressed.q.out 
b/ql/src/test/results/clientpositive/llap/llap_uncompressed.q.out new file mode 100644 index 0000000000..6900cdb364 --- /dev/null +++ b/ql/src/test/results/clientpositive/llap/llap_uncompressed.q.out @@ -0,0 +1,283 @@ +PREHOOK: query: DROP TABLE orc_llap_n0 +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE orc_llap_n0 +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE orc_llap_n0( + ctinyint TINYINT, + csmallint SMALLINT, + cint INT, + cbigint BIGINT, + cfloat FLOAT, + cdouble DOUBLE, + cstring1 STRING, + cstring2 STRING, + ctimestamp1 TIMESTAMP, + ctimestamp2 TIMESTAMP, + cboolean1 BOOLEAN, + cboolean2 BOOLEAN, + cdecimal1 decimal(10,2), + cdecimal2 decimal(38,5)) + STORED AS ORC tblproperties ("orc.compress"="NONE") +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@orc_llap_n0 +POSTHOOK: query: CREATE TABLE orc_llap_n0( + ctinyint TINYINT, + csmallint SMALLINT, + cint INT, + cbigint BIGINT, + cfloat FLOAT, + cdouble DOUBLE, + cstring1 STRING, + cstring2 STRING, + ctimestamp1 TIMESTAMP, + ctimestamp2 TIMESTAMP, + cboolean1 BOOLEAN, + cboolean2 BOOLEAN, + cdecimal1 decimal(10,2), + cdecimal2 decimal(38,5)) + STORED AS ORC tblproperties ("orc.compress"="NONE") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@orc_llap_n0 +PREHOOK: query: insert into table orc_llap_n0 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2, + cast("3.345" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: default@orc_llap_n0 +POSTHOOK: query: insert into table orc_llap_n0 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2, + cast("3.345" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: default@orc_llap_n0 +POSTHOOK: Lineage: orc_llap_n0.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cboolean1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cboolean2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cdecimal1 SIMPLE [] +POSTHOOK: Lineage: orc_llap_n0.cdecimal2 SIMPLE [] +POSTHOOK: Lineage: orc_llap_n0.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.csmallint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctimestamp1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctimestamp2 SIMPLE 
[(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ] +PREHOOK: query: alter table orc_llap_n0 set tblproperties ("orc.compress"="NONE", 'orc.write.format'='UNSTABLE-PRE-2.0') +PREHOOK: type: ALTERTABLE_PROPERTIES +PREHOOK: Input: default@orc_llap_n0 +PREHOOK: Output: default@orc_llap_n0 +POSTHOOK: query: alter table orc_llap_n0 set tblproperties ("orc.compress"="NONE", 'orc.write.format'='UNSTABLE-PRE-2.0') +POSTHOOK: type: ALTERTABLE_PROPERTIES +POSTHOOK: Input: default@orc_llap_n0 +POSTHOOK: Output: default@orc_llap_n0 +PREHOOK: query: insert into table orc_llap_n0 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2, + cast("3.345" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: default@orc_llap_n0 +POSTHOOK: query: insert into table orc_llap_n0 +select ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2, + cast("3.345" as decimal(10,2)), cast("5.56789" as decimal(38,5)) from alltypesorc +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: default@orc_llap_n0 +POSTHOOK: Lineage: orc_llap_n0.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cboolean1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cboolean2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cdecimal1 SIMPLE [] +POSTHOOK: Lineage: orc_llap_n0.cdecimal2 SIMPLE [] +POSTHOOK: Lineage: orc_llap_n0.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.csmallint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctimestamp1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctimestamp2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ] +POSTHOOK: Lineage: orc_llap_n0.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ] +PREHOOK: query: drop table llap_temp_table +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table llap_temp_table +POSTHOOK: type: DROPTABLE +PREHOOK: query: explain +select * from orc_llap_n0 where cint > 10 and cbigint is not null +PREHOOK: type: QUERY +POSTHOOK: query: explain +select * from orc_llap_n0 where cint > 10 and cbigint is not null +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + 
Fetch Operator + limit: -1 + Processor Tree: + TableScan + alias: orc_llap_n0 + filterExpr: ((cint > 10) and cbigint is not null) (type: boolean) + Filter Operator + predicate: ((cint > 10) and cbigint is not null) (type: boolean) + Select Operator + expressions: ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cfloat (type: float), cdouble (type: double), cstring1 (type: string), cstring2 (type: string), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), cboolean1 (type: boolean), cboolean2 (type: boolean), cdecimal1 (type: decimal(10,2)), cdecimal2 (type: decimal(38,5)) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13 + ListSink + +PREHOOK: query: create table llap_temp_table as +select * from orc_llap_n0 where cint > 10 and cbigint is not null +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@orc_llap_n0 +PREHOOK: Output: database:default +PREHOOK: Output: default@llap_temp_table +POSTHOOK: query: create table llap_temp_table as +select * from orc_llap_n0 where cint > 10 and cbigint is not null +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@orc_llap_n0 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@llap_temp_table +POSTHOOK: Lineage: llap_temp_table.cbigint SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: llap_temp_table.cboolean1 SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: llap_temp_table.cboolean2 SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cboolean2, type:boolean, comment:null), ] +POSTHOOK: Lineage: llap_temp_table.cdecimal1 SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cdecimal1, type:decimal(10,2), comment:null), ] +POSTHOOK: Lineage: llap_temp_table.cdecimal2 SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cdecimal2, type:decimal(38,5), comment:null), ] +POSTHOOK: Lineage: llap_temp_table.cdouble SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: llap_temp_table.cfloat SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: llap_temp_table.cint SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: llap_temp_table.csmallint SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: llap_temp_table.cstring1 SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: llap_temp_table.cstring2 SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:cstring2, type:string, comment:null), ] +POSTHOOK: Lineage: llap_temp_table.ctimestamp1 SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: llap_temp_table.ctimestamp2 SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ] +POSTHOOK: Lineage: llap_temp_table.ctinyint SIMPLE [(orc_llap_n0)orc_llap_n0.FieldSchema(name:ctinyint, type:tinyint, comment:null), ] +PREHOOK: query: select sum(hash(*)) from llap_temp_table +PREHOOK: type: QUERY +PREHOOK: Input: default@llap_temp_table +#### A masked pattern was here #### +POSTHOOK: query: select sum(hash(*)) from llap_temp_table +POSTHOOK: type: QUERY +POSTHOOK: Input: default@llap_temp_table +#### A masked pattern was here #### 
+212787774304 +PREHOOK: query: explain +select * from orc_llap_n0 where cint > 10 and cint < 5000000 +PREHOOK: type: QUERY +POSTHOOK: query: explain +select * from orc_llap_n0 where cint > 10 and cint < 5000000 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + TableScan + alias: orc_llap_n0 + filterExpr: ((cint > 10) and (cint < 5000000)) (type: boolean) + Filter Operator + predicate: ((cint < 5000000) and (cint > 10)) (type: boolean) + Select Operator + expressions: ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cfloat (type: float), cdouble (type: double), cstring1 (type: string), cstring2 (type: string), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), cboolean1 (type: boolean), cboolean2 (type: boolean), cdecimal1 (type: decimal(10,2)), cdecimal2 (type: decimal(38,5)) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13 + ListSink + +PREHOOK: query: select * from orc_llap_n0 where cint > 10 and cint < 5000000 +PREHOOK: type: QUERY +PREHOOK: Input: default@orc_llap_n0 +#### A masked pattern was here #### +POSTHOOK: query: select * from orc_llap_n0 where cint > 10 and cint < 5000000 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@orc_llap_n0 +#### A masked pattern was here #### +-51 NULL 6981 707684071 -51.0 NULL YdG61y00526u5 G71l66F25 1969-12-31 16:00:08.451 NULL false true 3.35 5.56789 +-51 NULL 762 1587111633 -51.0 NULL q5y2Vy1 UbUx5 1969-12-31 16:00:08.451 NULL true false 3.35 5.56789 +-51 NULL 6981 -1569596201 -51.0 NULL o4lvY20511w0EOX3P3I82p63 J6YIW3yQlW3GydlRm 1969-12-31 16:00:08.451 NULL false true 3.35 5.56789 +-51 NULL 2949963 -1580871111 -51.0 NULL 0K68k3bdl7jO7 TPPAu 1969-12-31 16:00:08.451 NULL true false 3.35 5.56789 +-51 NULL 2089466 -240556350 -51.0 NULL cXX24dH7tblSj46j2g C31eea0wrHHqvj 1969-12-31 16:00:08.451 NULL true true 3.35 5.56789 +-51 NULL 6981 -471484665 -51.0 NULL 4KhrrQ0nJ7bMNTvhSCA R31tq72k1528DQ5C3Y4cNub 1969-12-31 16:00:08.451 NULL true false 3.35 5.56789 +-51 NULL 762 -755927849 -51.0 NULL a10E76jX35YwquKCTA s7473frMk58vm 1969-12-31 16:00:08.451 NULL true true 3.35 5.56789 +NULL 1016 3432650 1864027286 NULL 1016.0 0SPVSOVDI73t 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:12.364 false true 3.35 5.56789 +NULL 10144 4756105 1864027286 NULL 10144.0 bvoO6VwRmH6181mdOm87Do 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:12.134 true true 3.35 5.56789 +NULL 10653 3887593 1864027286 NULL 10653.0 2wak50xB5nHswbX 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:48.858 false true 3.35 5.56789 +NULL 10782 1286921 1864027286 NULL 10782.0 ODLrXI8882q8LS8 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:52.138 true true 3.35 5.56789 +NULL 197 762 1864027286 NULL 2563.58 3WsVeqb28VWEEOLI8ail 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:45.603 true true 3.35 5.56789 +NULL 1535 86028 1864027286 NULL 1535.0 T2o8XRFAL0HC4ikDQnfoCymw 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:54.662 true true 3.35 5.56789 +NULL 5064 504142 1864027286 NULL 5064.0 PlOxor04p5cvVl 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:09.828 true true 3.35 5.56789 +NULL -3799 1248059 1864027286 NULL -3799.0 Uhps6mMh3IfHB3j7yH62K 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:54.622 false true 3.35 5.56789 +NULL 10299 799471 1864027286 NULL 10299.0 2fu24 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:52.516 false true 3.35 5.56789 +NULL -8915 2101183 1864027286 NULL -8915.0 x7By66525 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:05.831 
false true 3.35 5.56789 +8 NULL 2433892 -1611863517 8.0 NULL 674ILv3V2TxFqXP6wSbL VLprkK2XfX 1969-12-31 16:00:15.892 NULL false true 3.35 5.56789 +8 NULL 3073556 332961835 8.0 NULL rR855m18hps5nkaFqE43W pH15gLf8B4yNFDWFH74 1969-12-31 16:00:15.892 NULL true true 3.35 5.56789 +8 NULL 6981 627355276 8.0 NULL K630vaVf 7gDn3I45FGIX0J6JH74PCEN 1969-12-31 16:00:15.892 NULL false true 3.35 5.56789 +8 NULL 2229621 -381406148 8.0 NULL q7onkS7QRPh5ghOK oKb0bi 1969-12-31 16:00:15.892 NULL true false 3.35 5.56789 +NULL 359 6981 -1887561756 NULL 9763215.5639 sF2CRfgt2K 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:12.489 false false 3.35 5.56789 +NULL -12328 3253295 -1887561756 NULL -12328.0 Ut5NYg5XWb 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:57.985 true false 3.35 5.56789 +11 NULL 1000828 1531084669 11.0 NULL wM316f6NqGIkoP388j3F6 poWQQo3Upvt3Wh 1969-12-31 16:00:02.351 NULL false true 3.35 5.56789 +11 NULL 6981 -1908387379 11.0 NULL a3EhVU6Wuy7ycJ7wY7h2gv 0542kSCNs54o7tD6e2YuI3 1969-12-31 16:00:02.351 NULL true false 3.35 5.56789 +11 NULL 1310786 -413875656 11.0 NULL W0rvA4H1xn0xMG4uk0 8yVVjG 1969-12-31 16:00:02.351 NULL false true 3.35 5.56789 +11 NULL 6981 -667592125 11.0 NULL NULL xIVF2uu7 1969-12-31 16:00:02.351 NULL NULL true 3.35 5.56789 +11 NULL 3583612 -1172590956 11.0 NULL hrSdTD2Q05 mJ5nwN6o4s8Hi4 1969-12-31 16:00:02.351 NULL true true 3.35 5.56789 +11 NULL 6981 1532810435 11.0 NULL Y5x3JuI3M8jngv5N L760FuvYP 1969-12-31 16:00:02.351 NULL true true 3.35 5.56789 +11 NULL 4972984 -483828108 11.0 NULL Sf45K8ueb68jp6s8 jPWX6Wr4fmTBSc5HSlX1r 1969-12-31 16:00:02.351 NULL true false 3.35 5.56789 +11 NULL 762 -1005594359 11.0 NULL BLoMwUJ51ns6pd FtT7S 1969-12-31 16:00:02.351 NULL false false 3.35 5.56789 +NULL 359 762 -1645852809 NULL 9763215.5639 40ks5556SV xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:55.352 false false 3.35 5.56789 +NULL -75 6981 -1645852809 NULL -863.257 o5mb0QP5Y48Qd4vdB0 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:44.062 true false 3.35 5.56789 +NULL -75 6981 -1645852809 NULL -863.257 1FNNhmiFLGw425NA13g xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.463 false false 3.35 5.56789 +NULL -13036 1288927 -1645852809 NULL -13036.0 yinBY725P7V2 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:00.763 true false 3.35 5.56789 +-51 NULL 6981 707684071 -51.0 NULL YdG61y00526u5 G71l66F25 1969-12-31 16:00:08.451 NULL false true 3.35 5.56789 +-51 NULL 762 1587111633 -51.0 NULL q5y2Vy1 UbUx5 1969-12-31 16:00:08.451 NULL true false 3.35 5.56789 +-51 NULL 6981 -1569596201 -51.0 NULL o4lvY20511w0EOX3P3I82p63 J6YIW3yQlW3GydlRm 1969-12-31 16:00:08.451 NULL false true 3.35 5.56789 +-51 NULL 2949963 -1580871111 -51.0 NULL 0K68k3bdl7jO7 TPPAu 1969-12-31 16:00:08.451 NULL true false 3.35 5.56789 +-51 NULL 2089466 -240556350 -51.0 NULL cXX24dH7tblSj46j2g C31eea0wrHHqvj 1969-12-31 16:00:08.451 NULL true true 3.35 5.56789 +-51 NULL 6981 -471484665 -51.0 NULL 4KhrrQ0nJ7bMNTvhSCA R31tq72k1528DQ5C3Y4cNub 1969-12-31 16:00:08.451 NULL true false 3.35 5.56789 +-51 NULL 762 -755927849 -51.0 NULL a10E76jX35YwquKCTA s7473frMk58vm 1969-12-31 16:00:08.451 NULL true true 3.35 5.56789 +NULL 1016 3432650 1864027286 NULL 1016.0 0SPVSOVDI73t 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:12.364 false true 3.35 5.56789 +NULL 10144 4756105 1864027286 NULL 10144.0 bvoO6VwRmH6181mdOm87Do 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:12.134 true true 3.35 5.56789 +NULL 10653 3887593 1864027286 NULL 10653.0 2wak50xB5nHswbX 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:48.858 false true 3.35 5.56789 +NULL 10782 1286921 1864027286 NULL 10782.0 
ODLrXI8882q8LS8 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:52.138 true true 3.35 5.56789 +NULL 197 762 1864027286 NULL 2563.58 3WsVeqb28VWEEOLI8ail 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:45.603 true true 3.35 5.56789 +NULL 1535 86028 1864027286 NULL 1535.0 T2o8XRFAL0HC4ikDQnfoCymw 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:54.662 true true 3.35 5.56789 +NULL 5064 504142 1864027286 NULL 5064.0 PlOxor04p5cvVl 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:09.828 true true 3.35 5.56789 +NULL -3799 1248059 1864027286 NULL -3799.0 Uhps6mMh3IfHB3j7yH62K 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:54.622 false true 3.35 5.56789 +NULL 10299 799471 1864027286 NULL 10299.0 2fu24 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:52.516 false true 3.35 5.56789 +NULL -8915 2101183 1864027286 NULL -8915.0 x7By66525 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:05.831 false true 3.35 5.56789 +8 NULL 2433892 -1611863517 8.0 NULL 674ILv3V2TxFqXP6wSbL VLprkK2XfX 1969-12-31 16:00:15.892 NULL false true 3.35 5.56789 +8 NULL 3073556 332961835 8.0 NULL rR855m18hps5nkaFqE43W pH15gLf8B4yNFDWFH74 1969-12-31 16:00:15.892 NULL true true 3.35 5.56789 +8 NULL 6981 627355276 8.0 NULL K630vaVf 7gDn3I45FGIX0J6JH74PCEN 1969-12-31 16:00:15.892 NULL false true 3.35 5.56789 +8 NULL 2229621 -381406148 8.0 NULL q7onkS7QRPh5ghOK oKb0bi 1969-12-31 16:00:15.892 NULL true false 3.35 5.56789 +NULL 359 6981 -1887561756 NULL 9763215.5639 sF2CRfgt2K 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:12.489 false false 3.35 5.56789 +NULL -12328 3253295 -1887561756 NULL -12328.0 Ut5NYg5XWb 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:57.985 true false 3.35 5.56789 +11 NULL 1000828 1531084669 11.0 NULL wM316f6NqGIkoP388j3F6 poWQQo3Upvt3Wh 1969-12-31 16:00:02.351 NULL false true 3.35 5.56789 +11 NULL 6981 -1908387379 11.0 NULL a3EhVU6Wuy7ycJ7wY7h2gv 0542kSCNs54o7tD6e2YuI3 1969-12-31 16:00:02.351 NULL true false 3.35 5.56789 +11 NULL 1310786 -413875656 11.0 NULL W0rvA4H1xn0xMG4uk0 8yVVjG 1969-12-31 16:00:02.351 NULL false true 3.35 5.56789 +11 NULL 6981 -667592125 11.0 NULL NULL xIVF2uu7 1969-12-31 16:00:02.351 NULL NULL true 3.35 5.56789 +11 NULL 3583612 -1172590956 11.0 NULL hrSdTD2Q05 mJ5nwN6o4s8Hi4 1969-12-31 16:00:02.351 NULL true true 3.35 5.56789 +11 NULL 6981 1532810435 11.0 NULL Y5x3JuI3M8jngv5N L760FuvYP 1969-12-31 16:00:02.351 NULL true true 3.35 5.56789 +11 NULL 4972984 -483828108 11.0 NULL Sf45K8ueb68jp6s8 jPWX6Wr4fmTBSc5HSlX1r 1969-12-31 16:00:02.351 NULL true false 3.35 5.56789 +11 NULL 762 -1005594359 11.0 NULL BLoMwUJ51ns6pd FtT7S 1969-12-31 16:00:02.351 NULL false false 3.35 5.56789 +NULL 359 762 -1645852809 NULL 9763215.5639 40ks5556SV xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:55.352 false false 3.35 5.56789 +NULL -75 6981 -1645852809 NULL -863.257 o5mb0QP5Y48Qd4vdB0 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:44.062 true false 3.35 5.56789 +NULL -75 6981 -1645852809 NULL -863.257 1FNNhmiFLGw425NA13g xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.463 false false 3.35 5.56789 +NULL -13036 1288927 -1645852809 NULL -13036.0 yinBY725P7V2 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:00.763 true false 3.35 5.56789 +PREHOOK: query: DROP TABLE orc_llap_n0 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@orc_llap_n0 +PREHOOK: Output: default@orc_llap_n0 +POSTHOOK: query: DROP TABLE orc_llap_n0 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@orc_llap_n0 +POSTHOOK: Output: default@orc_llap_n0 +PREHOOK: query: drop table llap_temp_table +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@llap_temp_table +PREHOOK: Output: default@llap_temp_table +POSTHOOK: query: 
drop table llap_temp_table +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@llap_temp_table +POSTHOOK: Output: default@llap_temp_table diff --git a/ql/src/test/results/clientpositive/llap/mergejoin.q.out b/ql/src/test/results/clientpositive/llap/mergejoin.q.out index 832ed487ec..b240b11708 100644 --- a/ql/src/test/results/clientpositive/llap/mergejoin.q.out +++ b/ql/src/test/results/clientpositive/llap/mergejoin.q.out @@ -64,8 +64,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -148,8 +147,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -445,8 +443,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -498,8 +496,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1620,8 +1618,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1665,8 +1663,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1816,8 +1814,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1861,8 +1859,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true 
usesVectorUDFAdaptor: false @@ -2012,8 +2010,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2057,8 +2055,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2220,8 +2218,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2304,8 +2302,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2356,8 +2353,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2618,8 +2615,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2671,8 +2668,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2834,8 +2831,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2887,8 +2884,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ 
-2940,8 +2937,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2993,8 +2990,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3151,8 +3148,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3204,8 +3201,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3365,8 +3362,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3449,8 +3446,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3501,8 +3497,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3771,8 +3767,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3824,8 +3820,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3877,8 +3873,8 @@ STAGE PLANS: Map 
Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3930,8 +3926,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -4101,8 +4097,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -4153,8 +4149,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_complex_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_complex_llap_io.q.out index 5052fe6d6d..ca2bfb7b47 100644 --- a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_complex_llap_io.q.out +++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_complex_llap_io.q.out @@ -92,8 +92,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -336,8 +335,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -497,8 +495,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -745,8 +742,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat 
allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_primitive_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_primitive_llap_io.q.out index df136bf659..8765301c39 100644 --- a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_primitive_llap_io.q.out +++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_primitive_llap_io.q.out @@ -182,8 +182,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -513,8 +512,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -775,8 +773,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -992,8 +989,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1198,7 +1194,7 @@ STAGE PLANS: Statistics: Num rows: 5 Data size: 3595 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:insert_num:int, 1:c1:char(12), 2:c2:char(25), 3:c3:varchar(25), 4:c4:varchar(10), 5:c5:decimal(12,4), 6:c6:decimal(20,10), 7:b:string, 8:part:int, 9:ROW__ID:struct] + vectorizationSchemaColumns: [0:insert_num:int, 1:c1:char(12), 2:c2:char(25), 3:c3:varchar(25), 4:c4:varchar(10), 5:c5:decimal(12,4)/DECIMAL_64, 6:c6:decimal(20,10), 7:b:string, 8:part:int, 9:ROW__ID:struct] Select Operator expressions: insert_num (type: int), part (type: int), c1 (type: char(12)), c2 (type: char(25)), c3 (type: varchar(25)), c4 (type: varchar(10)), c5 (type: decimal(12,4)), c6 (type: decimal(20,10)), b (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 @@ -1223,8 +1219,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1232,7 +1227,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 8 includeColumns: [0, 1, 2, 3, 4, 5, 6, 7] - dataColumns: 
insert_num:int, c1:char(12), c2:char(25), c3:varchar(25), c4:varchar(10), c5:decimal(12,4), c6:decimal(20,10), b:string + dataColumns: insert_num:int, c1:char(12), c2:char(25), c3:varchar(25), c4:varchar(10), c5:decimal(12,4)/DECIMAL_64, c6:decimal(20,10), b:string partitionColumnCount: 1 partitionColumns: part:int scratchColumnTypeNames: [] diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_llap_io.q.out index 7259b33546..36b53e5ec2 100644 --- a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_llap_io.q.out +++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_llap_io.q.out @@ -88,8 +88,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -200,8 +199,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -314,8 +312,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -439,8 +436,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -590,8 +586,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -736,8 +731,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -875,8 +869,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: 
org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1028,8 +1021,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1158,8 +1150,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_table_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_table_llap_io.q.out index 8f83622069..867e13406b 100644 --- a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_table_llap_io.q.out +++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_table_llap_io.q.out @@ -88,8 +88,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -194,8 +193,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -293,8 +291,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -400,8 +397,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -501,8 +497,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -608,8 +603,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - 
vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -716,8 +710,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -879,8 +872,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1005,8 +997,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1134,8 +1125,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1255,8 +1245,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1377,8 +1366,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1556,8 +1544,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1726,8 +1713,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false 
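
The recurring golden-file change in these plans is the point of the patch: "featureSupportInUse: []" becomes "[DECIMAL_64]", the "vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]" line disappears, and eligible decimal columns gain a "/DECIMAL_64" suffix in vectorizationSchemaColumns and dataColumns. At the vector level this means such columns are now read into Decimal64ColumnVector (backed by a primitive long array) instead of DecimalColumnVector (one HiveDecimalWritable per value). A minimal sketch of that difference, assuming Hive's storage-api ColumnVector classes; the helper itself is hypothetical and not part of this patch:

  import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
  import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector;
  import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
  import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

  import java.math.BigDecimal;

  public class Decimal64Example {

    // Hypothetical helper, not from the patch: formats row r of decimal
    // column c from a VectorizedRowBatch, whichever representation it uses.
    static String readDecimal(VectorizedRowBatch batch, int c, int r) {
      ColumnVector cv = batch.cols[c];
      int row = cv.isRepeating ? 0 : r;  // a repeating vector holds one value
      if (!cv.noNulls && cv.isNull[row]) {
        return "NULL";
      }
      if (cv instanceof Decimal64ColumnVector) {
        // DECIMAL_64 path: the unscaled value lives in a primitive long
        // (e.g. 3.35 at scale 2 is stored as 335), so scanning allocates
        // no per-value objects.
        Decimal64ColumnVector d64 = (Decimal64ColumnVector) cv;
        return BigDecimal.valueOf(d64.vector[row], d64.scale).toPlainString();
      }
      // General path: one HiveDecimalWritable per value, required once the
      // decimal no longer fits in 64 bits.
      DecimalColumnVector dec = (DecimalColumnVector) cv;
      return dec.vector[row].getHiveDecimal().toString();
    }
  }

Only decimals whose precision fits in 18 digits (ORC's TypeDescription.MAX_DECIMAL64_PRECISION) are eligible, which is why the decimal(10,0), decimal(11,5), decimal(12,4), and decimal(16,0) columns in these plans pick up the /DECIMAL_64 annotation while decimal(20,10) does not.
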
diff --git a/ql/src/test/results/clientpositive/llap/vector_char_varchar_1.q.out b/ql/src/test/results/clientpositive/llap/vector_char_varchar_1.q.out index 1a32227dd2..5a23539cc9 100644 --- a/ql/src/test/results/clientpositive/llap/vector_char_varchar_1.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_char_varchar_1.q.out @@ -83,8 +83,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: true @@ -231,8 +230,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: true diff --git a/ql/src/test/results/clientpositive/llap/vector_create_struct_table.q.out b/ql/src/test/results/clientpositive/llap/vector_create_struct_table.q.out index c08154efa9..f3122444b7 100644 --- a/ql/src/test/results/clientpositive/llap/vector_create_struct_table.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_create_struct_table.q.out @@ -79,8 +79,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -191,8 +190,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -303,8 +301,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out index 5e835cd1db..3170625d2f 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out @@ -62,7 +62,7 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:dec:decimal(10,0), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:dec:decimal(10,0)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: dec (type: decimal(10,0)) outputColumnNames: _col0 @@ -86,8 +86,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -95,7 +95,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: dec:decimal(10,0) + dataColumns: dec:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -178,7 +178,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:dec:decimal(10,0), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:dec:decimal(10,0)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: dec (type: decimal(10,0)) outputColumnNames: _col0 @@ -203,8 +203,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -212,7 +211,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: dec:decimal(10,0) + dataColumns: dec:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out index 6cd1e8d353..30a6770868 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out @@ -111,8 +111,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -291,8 +291,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -465,7 +465,7 @@ STAGE PLANS: Statistics: Num rows: 12289 Data size: 2662128 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:cdouble:double, 1:cdecimal1:decimal(11,5), 2:cdecimal2:decimal(16,0), 3:cint:int, 4:ROW__ID:struct] + vectorizationSchemaColumns: [0:cdouble:double, 1:cdecimal1:decimal(11,5)/DECIMAL_64, 2:cdecimal2:decimal(16,0)/DECIMAL_64, 3:cint:int, 4:ROW__ID:struct] Select Operator expressions: cdecimal1 (type: decimal(11,5)), cdecimal2 (type: decimal(16,0)), cint (type: int) outputColumnNames: cdecimal1, cdecimal2, cint @@ -477,7 +477,7 @@ STAGE PLANS: Group By Operator aggregations: count(cdecimal1), max(cdecimal1), min(cdecimal1), sum(cdecimal1), count(cdecimal2), max(cdecimal2), min(cdecimal2), sum(cdecimal2), count() Group By Vectorization: - aggregators: VectorUDAFCount(col 1:decimal(11,5)) -> 
bigint, VectorUDAFMaxDecimal(col 1:decimal(11,5)) -> decimal(11,5), VectorUDAFMinDecimal(col 1:decimal(11,5)) -> decimal(11,5), VectorUDAFSumDecimal(col 1:decimal(11,5)) -> decimal(21,5), VectorUDAFCount(col 2:decimal(16,0)) -> bigint, VectorUDAFMaxDecimal(col 2:decimal(16,0)) -> decimal(16,0), VectorUDAFMinDecimal(col 2:decimal(16,0)) -> decimal(16,0), VectorUDAFSumDecimal(col 2:decimal(16,0)) -> decimal(26,0), VectorUDAFCountStar(*) -> bigint + aggregators: VectorUDAFCount(col 1:decimal(11,5)/DECIMAL_64) -> bigint, VectorUDAFMaxDecimal64(col 1:decimal(11,5)/DECIMAL_64) -> decimal(11,5)/DECIMAL_64, VectorUDAFMinDecimal64(col 1:decimal(11,5)/DECIMAL_64) -> decimal(11,5)/DECIMAL_64, VectorUDAFSumDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> decimal(21,5), VectorUDAFCount(col 2:decimal(16,0)/DECIMAL_64) -> bigint, VectorUDAFMaxDecimal64(col 2:decimal(16,0)/DECIMAL_64) -> decimal(16,0)/DECIMAL_64, VectorUDAFMinDecimal64(col 2:decimal(16,0)/DECIMAL_64) -> decimal(16,0)/DECIMAL_64, VectorUDAFSumDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> decimal(26,0), VectorUDAFCountStar(*) -> bigint className: VectorGroupByOperator groupByMode: HASH keyExpressions: col 3:int @@ -506,8 +506,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -515,7 +514,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 4 includeColumns: [1, 2, 3] - dataColumns: cdouble:double, cdecimal1:decimal(11,5), cdecimal2:decimal(16,0), cint:int + dataColumns: cdouble:double, cdecimal1:decimal(11,5)/DECIMAL_64, cdecimal2:decimal(16,0)/DECIMAL_64, cint:int partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -664,20 +663,20 @@ STAGE PLANS: Statistics: Num rows: 12289 Data size: 2662128 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:cdouble:double, 1:cdecimal1:decimal(11,5), 2:cdecimal2:decimal(16,0), 3:cint:int, 4:ROW__ID:struct] + vectorizationSchemaColumns: [0:cdouble:double, 1:cdecimal1:decimal(11,5)/DECIMAL_64, 2:cdecimal2:decimal(16,0)/DECIMAL_64, 3:cint:int, 4:ROW__ID:struct] Select Operator expressions: cint (type: int), cdecimal1 (type: decimal(11,5)), cdecimal2 (type: decimal(16,0)), UDFToDouble(cdecimal1) (type: double), (UDFToDouble(cdecimal1) * UDFToDouble(cdecimal1)) (type: double), UDFToDouble(cdecimal2) (type: double), (UDFToDouble(cdecimal2) * UDFToDouble(cdecimal2)) (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [3, 1, 2, 5, 8, 6, 10] - selectExpressions: CastDecimalToDouble(col 1:decimal(11,5)) -> 5:double, DoubleColMultiplyDoubleColumn(col 6:double, col 7:double)(children: CastDecimalToDouble(col 1:decimal(11,5)) -> 6:double, CastDecimalToDouble(col 1:decimal(11,5)) -> 7:double) -> 8:double, CastDecimalToDouble(col 2:decimal(16,0)) -> 6:double, DoubleColMultiplyDoubleColumn(col 7:double, col 9:double)(children: CastDecimalToDouble(col 2:decimal(16,0)) -> 7:double, CastDecimalToDouble(col 2:decimal(16,0)) -> 9:double) -> 10:double + projectedOutputColumnNums: [3, 1, 2, 6, 9, 7, 12] + selectExpressions: CastDecimalToDouble(col 5:decimal(11,5))(children: 
ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 5:decimal(11,5)) -> 6:double, DoubleColMultiplyDoubleColumn(col 7:double, col 8:double)(children: CastDecimalToDouble(col 5:decimal(11,5))(children: ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 5:decimal(11,5)) -> 7:double, CastDecimalToDouble(col 5:decimal(11,5))(children: ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 5:decimal(11,5)) -> 8:double) -> 9:double, CastDecimalToDouble(col 10:decimal(16,0))(children: ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 10:decimal(16,0)) -> 7:double, DoubleColMultiplyDoubleColumn(col 8:double, col 11:double)(children: CastDecimalToDouble(col 10:decimal(16,0))(children: ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 10:decimal(16,0)) -> 8:double, CastDecimalToDouble(col 10:decimal(16,0))(children: ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 10:decimal(16,0)) -> 11:double) -> 12:double Statistics: Num rows: 12289 Data size: 2662128 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(_col1), max(_col1), min(_col1), sum(_col1), sum(_col4), sum(_col3), count(_col2), max(_col2), min(_col2), sum(_col2), sum(_col6), sum(_col5), count() Group By Vectorization: - aggregators: VectorUDAFCount(col 1:decimal(11,5)) -> bigint, VectorUDAFMaxDecimal(col 1:decimal(11,5)) -> decimal(11,5), VectorUDAFMinDecimal(col 1:decimal(11,5)) -> decimal(11,5), VectorUDAFSumDecimal(col 1:decimal(11,5)) -> decimal(21,5), VectorUDAFSumDouble(col 8:double) -> double, VectorUDAFSumDouble(col 5:double) -> double, VectorUDAFCount(col 2:decimal(16,0)) -> bigint, VectorUDAFMaxDecimal(col 2:decimal(16,0)) -> decimal(16,0), VectorUDAFMinDecimal(col 2:decimal(16,0)) -> decimal(16,0), VectorUDAFSumDecimal(col 2:decimal(16,0)) -> decimal(26,0), VectorUDAFSumDouble(col 10:double) -> double, VectorUDAFSumDouble(col 6:double) -> double, VectorUDAFCountStar(*) -> bigint + aggregators: VectorUDAFCount(col 1:decimal(11,5)/DECIMAL_64) -> bigint, VectorUDAFMaxDecimal64(col 1:decimal(11,5)/DECIMAL_64) -> decimal(11,5)/DECIMAL_64, VectorUDAFMinDecimal64(col 1:decimal(11,5)/DECIMAL_64) -> decimal(11,5)/DECIMAL_64, VectorUDAFSumDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> decimal(21,5), VectorUDAFSumDouble(col 9:double) -> double, VectorUDAFSumDouble(col 6:double) -> double, VectorUDAFCount(col 2:decimal(16,0)/DECIMAL_64) -> bigint, VectorUDAFMaxDecimal64(col 2:decimal(16,0)/DECIMAL_64) -> decimal(16,0)/DECIMAL_64, VectorUDAFMinDecimal64(col 2:decimal(16,0)/DECIMAL_64) -> decimal(16,0)/DECIMAL_64, VectorUDAFSumDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> decimal(26,0), VectorUDAFSumDouble(col 12:double) -> double, VectorUDAFSumDouble(col 7:double) -> double, VectorUDAFCountStar(*) -> bigint className: VectorGroupByOperator groupByMode: HASH keyExpressions: col 3:int @@ -706,8 +705,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -715,9 +713,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 4 includeColumns: [1, 2, 3] - dataColumns: cdouble:double, cdecimal1:decimal(11,5), cdecimal2:decimal(16,0), cint:int + dataColumns: cdouble:double, cdecimal1:decimal(11,5)/DECIMAL_64, 
cdecimal2:decimal(16,0)/DECIMAL_64, cint:int partitionColumnCount: 0 - scratchColumnTypeNames: [double, double, double, double, double, double] + scratchColumnTypeNames: [decimal(11,5), double, double, double, double, decimal(16,0), double, double] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_cast.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_cast.q.out index 67630b44e4..2414907b23 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_cast.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_cast.q.out @@ -60,8 +60,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -184,8 +184,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out index 50e4305b2e..5e7e8cacef 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out @@ -612,8 +612,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1232,8 +1232,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_round.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_round.q.out index 6737052e03..eb4a5888fe 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_round.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_round.q.out @@ -59,15 +59,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:dec:decimal(10,0), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:dec:decimal(10,0)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: dec (type: decimal(10,0)), round(dec, -1) (type: decimal(11,0)) outputColumnNames: _col0, _col1 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 2] - selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 0:decimal(10,0), decimalPlaces -1) -> 
2:decimal(11,0) + projectedOutputColumnNums: [0, 3] + selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) -> 2:decimal(10,0)) -> 3:decimal(11,0) Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: decimal(10,0)) @@ -77,7 +77,7 @@ STAGE PLANS: keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2] + valueColumnNums: [3] Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(11,0)) Execution mode: vectorized, llap @@ -86,8 +86,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -95,9 +94,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: dec:decimal(10,0) + dataColumns: dec:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(11,0)] + scratchColumnTypeNames: [decimal(10,0), decimal(11,0)] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -177,22 +176,22 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:dec:decimal(10,0), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:dec:decimal(10,0)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: dec (type: decimal(10,0)), round(dec, -1) (type: decimal(11,0)) outputColumnNames: _col0, _col2 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 2] - selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 0:decimal(10,0), decimalPlaces -1) -> 2:decimal(11,0) + projectedOutputColumnNums: [0, 3] + selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) -> 2:decimal(10,0)) -> 3:decimal(11,0) Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col2 (type: decimal(11,0)) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2] + keyColumnNums: [3] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [0] @@ -204,8 +203,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true 
usesVectorUDFAdaptor: false @@ -213,9 +211,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: dec:decimal(10,0) + dataColumns: dec:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(11,0)] + scratchColumnTypeNames: [decimal(10,0), decimal(11,0)] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -585,15 +583,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:dec:decimal(10,0), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:dec:decimal(10,0)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: dec (type: decimal(10,0)), round(dec, -1) (type: decimal(11,0)) outputColumnNames: _col0, _col1 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 2] - selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 0:decimal(10,0), decimalPlaces -1) -> 2:decimal(11,0) + projectedOutputColumnNums: [0, 3] + selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) -> 2:decimal(10,0)) -> 3:decimal(11,0) Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: decimal(10,0)) @@ -603,7 +601,7 @@ STAGE PLANS: keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2] + valueColumnNums: [3] Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(11,0)) Execution mode: vectorized, llap @@ -611,8 +609,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -620,9 +618,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: dec:decimal(10,0) + dataColumns: dec:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(11,0)] + scratchColumnTypeNames: [decimal(10,0), decimal(11,0)] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -702,22 +700,22 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:dec:decimal(10,0), 1:ROW__ID:struct] + vectorizationSchemaColumns: [0:dec:decimal(10,0)/DECIMAL_64, 1:ROW__ID:struct] Select Operator expressions: dec (type: decimal(10,0)), round(dec, -1) (type: decimal(11,0)) outputColumnNames: _col0, _col2 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 2] - selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 0:decimal(10,0), decimalPlaces -1) -> 2:decimal(11,0) + projectedOutputColumnNums: [0, 3] + selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) 
-> 2:decimal(10,0)) -> 3:decimal(11,0) Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col2 (type: decimal(11,0)) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2] + keyColumnNums: [3] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumnNums: [0] @@ -728,8 +726,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -737,9 +735,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 1 includeColumns: [0] - dataColumns: dec:decimal(10,0) + dataColumns: dec:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(11,0)] + scratchColumnTypeNames: [decimal(10,0), decimal(11,0)] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_udf2.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_udf2.q.out index 9e1c8d7ea0..4c9b7379d5 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_udf2.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_udf2.q.out @@ -86,12 +86,12 @@ STAGE PLANS: Statistics: Num rows: 39 Data size: 4032 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:decimal(14,5), 1:value:int, 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:key:decimal(14,5)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterDecimalColEqualDecimalScalar(col 0:decimal(14,5), val 10) + predicateExpression: FilterDecimal64ColEqualDecimal64Scalar(col 0:decimal(14,5)/DECIMAL_64, val 1000000) predicate: (key = 10) (type: boolean) Statistics: Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -118,8 +118,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -127,7 +127,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0] - dataColumns: key:decimal(14,5), value:int + dataColumns: key:decimal(14,5)/DECIMAL_64, value:int partitionColumnCount: 0 scratchColumnTypeNames: [double, double, double, double, double, double, double] @@ -195,12 +195,12 @@ STAGE PLANS: Statistics: Num rows: 39 Data size: 4188 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:decimal(14,5), 1:value:int, 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:key:decimal(14,5)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: 
FilterDecimalColEqualDecimalScalar(col 0:decimal(14,5), val 10) + predicateExpression: FilterDecimal64ColEqualDecimal64Scalar(col 0:decimal(14,5)/DECIMAL_64, val 1000000) predicate: (key = 10) (type: boolean) Statistics: Num rows: 2 Data size: 232 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -227,8 +227,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true @@ -236,7 +236,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: key:decimal(14,5), value:int + dataColumns: key:decimal(14,5)/DECIMAL_64, value:int partitionColumnCount: 0 scratchColumnTypeNames: [double, double, double, double, double, double, double, double] @@ -310,12 +310,12 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:decimal(14,5), 1:value:int, 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:key:decimal(14,5)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterDecimalColEqualDecimalScalar(col 0:decimal(14,5), val 10) + predicateExpression: FilterDecimal64ColEqualDecimal64Scalar(col 0:decimal(14,5)/DECIMAL_64, val 1000000) predicate: (key = 10) (type: boolean) Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -343,8 +343,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -352,7 +351,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0] - dataColumns: key:decimal(14,5), value:int + dataColumns: key:decimal(14,5)/DECIMAL_64, value:int partitionColumnCount: 0 scratchColumnTypeNames: [double, double, double, double, double, double, double] @@ -420,12 +419,12 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:decimal(14,5), 1:value:int, 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:key:decimal(14,5)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterDecimalColEqualDecimalScalar(col 0:decimal(14,5), val 10) + predicateExpression: FilterDecimal64ColEqualDecimal64Scalar(col 0:decimal(14,5)/DECIMAL_64, val 1000000) predicate: (key = 10) (type: boolean) Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -453,8 +452,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat 
allNative: false usesVectorUDFAdaptor: true @@ -462,7 +460,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: key:decimal(14,5), value:int + dataColumns: key:decimal(14,5)/DECIMAL_64, value:int partitionColumnCount: 0 scratchColumnTypeNames: [double, double, double, double, double, double, double, double] diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby_cube1.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_cube1.q.out index 2ea9018586..5c0d6bbb73 100644 --- a/ql/src/test/results/clientpositive/llap/vector_groupby_cube1.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_groupby_cube1.q.out @@ -84,8 +84,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -222,8 +221,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -386,8 +384,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -648,8 +645,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1038,8 +1034,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby_mapjoin.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_mapjoin.q.out index 98e6e54f25..7dc98c2aef 100644 --- a/ql/src/test/results/clientpositive/llap/vector_groupby_mapjoin.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_groupby_mapjoin.q.out @@ -61,8 +61,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -129,8 +128,7 @@ STAGE PLANS: enabled: true 
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby_sort_11.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_sort_11.q.out index b772e9a774..ab29314031 100644 --- a/ql/src/test/results/clientpositive/llap/vector_groupby_sort_11.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_groupby_sort_11.q.out @@ -98,8 +98,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -521,8 +520,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -706,8 +704,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -887,8 +884,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby_sort_8.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_sort_8.q.out index 74ad6ae3a7..5e946c4e9c 100644 --- a/ql/src/test/results/clientpositive/llap/vector_groupby_sort_8.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_groupby_sort_8.q.out @@ -107,8 +107,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_llap_text_1.q.out b/ql/src/test/results/clientpositive/llap/vector_llap_text_1.q.out index f7721b6424..4cc23c4505 100644 --- a/ql/src/test/results/clientpositive/llap/vector_llap_text_1.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_llap_text_1.q.out @@ -179,8 +179,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true 
inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -259,8 +258,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_map_order.q.out b/ql/src/test/results/clientpositive/llap/vector_map_order.q.out index 02fc5a0490..238555c860 100644 --- a/ql/src/test/results/clientpositive/llap/vector_map_order.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_map_order.q.out @@ -75,8 +75,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_mapjoin_reduce.q.out b/ql/src/test/results/clientpositive/llap/vector_mapjoin_reduce.q.out index 09a53d0ed0..e0c7dfabbe 100644 --- a/ql/src/test/results/clientpositive/llap/vector_mapjoin_reduce.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_mapjoin_reduce.q.out @@ -67,8 +67,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -111,8 +110,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -166,8 +164,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -339,8 +336,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -384,8 +380,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: 
[DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -440,8 +435,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_null_map.q.out b/ql/src/test/results/clientpositive/llap/vector_null_map.q.out index 666f7fdb0d..5394cc67c8 100644 --- a/ql/src/test/results/clientpositive/llap/vector_null_map.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_null_map.q.out @@ -76,8 +76,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -149,8 +148,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: true diff --git a/ql/src/test/results/clientpositive/llap/vector_orc_merge_incompat_schema.q.out b/ql/src/test/results/clientpositive/llap/vector_orc_merge_incompat_schema.q.out index f1a4ea3080..90e21032ac 100644 --- a/ql/src/test/results/clientpositive/llap/vector_orc_merge_incompat_schema.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_orc_merge_incompat_schema.q.out @@ -161,8 +161,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: true @@ -245,8 +244,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: true diff --git a/ql/src/test/results/clientpositive/llap/vector_order_null.q.out b/ql/src/test/results/clientpositive/llap/vector_order_null.q.out index 08c57bdf7f..cb4053ee5a 100644 --- a/ql/src/test/results/clientpositive/llap/vector_order_null.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_order_null.q.out @@ -127,8 +127,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - 
featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -250,8 +249,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -373,8 +371,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -496,8 +493,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -619,8 +615,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -742,8 +737,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -865,8 +859,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -988,8 +981,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1111,8 +1103,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1234,8 +1225,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: 
hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1357,8 +1347,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_outer_reference_windowed.q.out b/ql/src/test/results/clientpositive/llap/vector_outer_reference_windowed.q.out index 89c14d59da..2b0a1e792a 100644 --- a/ql/src/test/results/clientpositive/llap/vector_outer_reference_windowed.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_outer_reference_windowed.q.out @@ -271,7 +271,7 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(15,2), 1:c2:decimal(15,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(15,2)/DECIMAL_64, 1:c2:decimal(15,2)/DECIMAL_64, 2:ROW__ID:struct] Select Operator expressions: c1 (type: decimal(15,2)) outputColumnNames: c1 @@ -283,7 +283,7 @@ STAGE PLANS: Group By Operator aggregations: sum(c1) Group By Vectorization: - aggregators: VectorUDAFSumDecimal(col 0:decimal(15,2)) -> decimal(25,2) + aggregators: VectorUDAFSumDecimal64ToDecimal(col 0:decimal(15,2)/DECIMAL_64) -> decimal(25,2) className: VectorGroupByOperator groupByMode: HASH native: false @@ -308,8 +308,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -317,7 +316,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0] - dataColumns: c1:decimal(15,2), c2:decimal(15,2) + dataColumns: c1:decimal(15,2)/DECIMAL_64, c2:decimal(15,2)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -489,7 +488,7 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(15,2), 1:c2:decimal(15,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(15,2)/DECIMAL_64, 1:c2:decimal(15,2)/DECIMAL_64, 2:ROW__ID:struct] Select Operator expressions: c1 (type: decimal(15,2)), c2 (type: decimal(15,2)) outputColumnNames: c1, c2 @@ -501,10 +500,10 @@ STAGE PLANS: Group By Operator aggregations: sum(c1) Group By Vectorization: - aggregators: VectorUDAFSumDecimal(col 0:decimal(15,2)) -> decimal(25,2) + aggregators: VectorUDAFSumDecimal64ToDecimal(col 0:decimal(15,2)/DECIMAL_64) -> decimal(25,2) className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:decimal(15,2), col 1:decimal(15,2) + keyExpressions: ConvertDecimal64ToDecimal(col 0:decimal(15,2)/DECIMAL_64) -> 3:decimal(15,2), ConvertDecimal64ToDecimal(col 
1:decimal(15,2)/DECIMAL_64) -> 4:decimal(15,2) native: false vectorProcessingMode: HASH projectedOutputColumnNums: [0] @@ -530,8 +529,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -539,9 +537,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: c1:decimal(15,2), c2:decimal(15,2) + dataColumns: c1:decimal(15,2)/DECIMAL_64, c2:decimal(15,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(15,2), decimal(15,2)] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -726,12 +724,12 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(15,2), 1:c2:decimal(15,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(15,2)/DECIMAL_64, 1:c2:decimal(15,2)/DECIMAL_64, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:decimal(15,2)) + predicateExpression: SelectColumnIsNotNull(col 3:decimal(15,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,2)/DECIMAL_64) -> 3:decimal(15,2)) predicate: c1 is not null (type: boolean) Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -760,8 +758,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -769,9 +766,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: c1:decimal(15,2), c2:decimal(15,2) + dataColumns: c1:decimal(15,2)/DECIMAL_64, c2:decimal(15,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(15,2)] Map 5 Map Operator Tree: TableScan @@ -779,12 +776,12 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(15,2), 1:c2:decimal(15,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(15,2)/DECIMAL_64, 1:c2:decimal(15,2)/DECIMAL_64, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:decimal(15,2)) + predicateExpression: SelectColumnIsNotNull(col 3:decimal(15,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,2)/DECIMAL_64) -> 3:decimal(15,2)) predicate: c1 is not null (type: boolean) Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -812,8 +809,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + 
featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -821,9 +817,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0] - dataColumns: c1:decimal(15,2), c2:decimal(15,2) + dataColumns: c1:decimal(15,2)/DECIMAL_64, c2:decimal(15,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(15,2)] Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -1035,12 +1031,12 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(15,2), 1:c2:decimal(15,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(15,2)/DECIMAL_64, 1:c2:decimal(15,2)/DECIMAL_64, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:decimal(15,2)) + predicateExpression: SelectColumnIsNotNull(col 3:decimal(15,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,2)/DECIMAL_64) -> 3:decimal(15,2)) predicate: c1 is not null (type: boolean) Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -1068,8 +1064,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1077,9 +1072,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0] - dataColumns: c1:decimal(15,2), c2:decimal(15,2) + dataColumns: c1:decimal(15,2)/DECIMAL_64, c2:decimal(15,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(15,2)] Map 5 Map Operator Tree: TableScan @@ -1087,12 +1082,12 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(15,2), 1:c2:decimal(15,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(15,2)/DECIMAL_64, 1:c2:decimal(15,2)/DECIMAL_64, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:decimal(15,2)) + predicateExpression: SelectColumnIsNotNull(col 3:decimal(15,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,2)/DECIMAL_64) -> 3:decimal(15,2)) predicate: c1 is not null (type: boolean) Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -1121,8 +1116,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1130,9 +1124,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: c1:decimal(15,2), c2:decimal(15,2) + dataColumns: c1:decimal(15,2)/DECIMAL_64, c2:decimal(15,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + 
scratchColumnTypeNames: [decimal(15,2)] Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -1343,12 +1337,12 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(15,2), 1:c2:decimal(15,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(15,2)/DECIMAL_64, 1:c2:decimal(15,2)/DECIMAL_64, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:decimal(15,2)) + predicateExpression: SelectColumnIsNotNull(col 3:decimal(15,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,2)/DECIMAL_64) -> 3:decimal(15,2)) predicate: c1 is not null (type: boolean) Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -1377,8 +1371,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1386,9 +1379,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: c1:decimal(15,2), c2:decimal(15,2) + dataColumns: c1:decimal(15,2)/DECIMAL_64, c2:decimal(15,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(15,2)] Map 4 Map Operator Tree: TableScan @@ -1396,12 +1389,12 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(15,2), 1:c2:decimal(15,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(15,2)/DECIMAL_64, 1:c2:decimal(15,2)/DECIMAL_64, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:decimal(15,2)) + predicateExpression: SelectColumnIsNotNull(col 3:decimal(15,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,2)/DECIMAL_64) -> 3:decimal(15,2)) predicate: c1 is not null (type: boolean) Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -1430,8 +1423,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1439,9 +1431,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: c1:decimal(15,2), c2:decimal(15,2) + dataColumns: c1:decimal(15,2)/DECIMAL_64, c2:decimal(15,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(15,2)] Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -1571,7 +1563,7 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(7,2), 1:c2:decimal(7,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(7,2)/DECIMAL_64, 
1:c2:decimal(7,2)/DECIMAL_64, 2:ROW__ID:struct] Select Operator expressions: c1 (type: decimal(7,2)) outputColumnNames: c1 @@ -1583,7 +1575,7 @@ STAGE PLANS: Group By Operator aggregations: sum(c1) Group By Vectorization: - aggregators: VectorUDAFSumDecimal(col 0:decimal(7,2)) -> decimal(17,2) + aggregators: VectorUDAFSumDecimal64(col 0:decimal(7,2)/DECIMAL_64) -> decimal(17,2)/DECIMAL_64 className: VectorGroupByOperator groupByMode: HASH native: false @@ -1608,8 +1600,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1617,7 +1608,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0] - dataColumns: c1:decimal(7,2), c2:decimal(7,2) + dataColumns: c1:decimal(7,2)/DECIMAL_64, c2:decimal(7,2)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1789,7 +1780,7 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(7,2), 1:c2:decimal(7,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(7,2)/DECIMAL_64, 1:c2:decimal(7,2)/DECIMAL_64, 2:ROW__ID:struct] Select Operator expressions: c1 (type: decimal(7,2)), c2 (type: decimal(7,2)) outputColumnNames: c1, c2 @@ -1801,10 +1792,10 @@ STAGE PLANS: Group By Operator aggregations: sum(c1) Group By Vectorization: - aggregators: VectorUDAFSumDecimal(col 0:decimal(7,2)) -> decimal(17,2) + aggregators: VectorUDAFSumDecimal64(col 0:decimal(7,2)/DECIMAL_64) -> decimal(17,2)/DECIMAL_64 className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:decimal(7,2), col 1:decimal(7,2) + keyExpressions: ConvertDecimal64ToDecimal(col 0:decimal(7,2)/DECIMAL_64) -> 3:decimal(7,2), ConvertDecimal64ToDecimal(col 1:decimal(7,2)/DECIMAL_64) -> 4:decimal(7,2) native: false vectorProcessingMode: HASH projectedOutputColumnNums: [0] @@ -1830,8 +1821,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1839,9 +1829,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: c1:decimal(7,2), c2:decimal(7,2) + dataColumns: c1:decimal(7,2)/DECIMAL_64, c2:decimal(7,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(7,2), decimal(7,2)] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -2026,12 +2016,12 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(7,2), 1:c2:decimal(7,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(7,2)/DECIMAL_64, 1:c2:decimal(7,2)/DECIMAL_64, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:decimal(7,2)) + predicateExpression: 
SelectColumnIsNotNull(col 3:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(7,2)/DECIMAL_64) -> 3:decimal(7,2)) predicate: c1 is not null (type: boolean) Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -2060,8 +2050,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2069,9 +2058,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: c1:decimal(7,2), c2:decimal(7,2) + dataColumns: c1:decimal(7,2)/DECIMAL_64, c2:decimal(7,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(7,2)] Map 5 Map Operator Tree: TableScan @@ -2079,12 +2068,12 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(7,2), 1:c2:decimal(7,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(7,2)/DECIMAL_64, 1:c2:decimal(7,2)/DECIMAL_64, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:decimal(7,2)) + predicateExpression: SelectColumnIsNotNull(col 3:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(7,2)/DECIMAL_64) -> 3:decimal(7,2)) predicate: c1 is not null (type: boolean) Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -2112,8 +2101,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2121,9 +2109,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0] - dataColumns: c1:decimal(7,2), c2:decimal(7,2) + dataColumns: c1:decimal(7,2)/DECIMAL_64, c2:decimal(7,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(7,2)] Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -2335,12 +2323,12 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(7,2), 1:c2:decimal(7,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(7,2)/DECIMAL_64, 1:c2:decimal(7,2)/DECIMAL_64, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:decimal(7,2)) + predicateExpression: SelectColumnIsNotNull(col 3:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(7,2)/DECIMAL_64) -> 3:decimal(7,2)) predicate: c1 is not null (type: boolean) Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -2368,8 +2356,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - 
vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2377,9 +2364,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0] - dataColumns: c1:decimal(7,2), c2:decimal(7,2) + dataColumns: c1:decimal(7,2)/DECIMAL_64, c2:decimal(7,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(7,2)] Map 5 Map Operator Tree: TableScan @@ -2387,12 +2374,12 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(7,2), 1:c2:decimal(7,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(7,2)/DECIMAL_64, 1:c2:decimal(7,2)/DECIMAL_64, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:decimal(7,2)) + predicateExpression: SelectColumnIsNotNull(col 3:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(7,2)/DECIMAL_64) -> 3:decimal(7,2)) predicate: c1 is not null (type: boolean) Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -2421,8 +2408,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2430,9 +2416,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: c1:decimal(7,2), c2:decimal(7,2) + dataColumns: c1:decimal(7,2)/DECIMAL_64, c2:decimal(7,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(7,2)] Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -2643,12 +2629,12 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(7,2), 1:c2:decimal(7,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(7,2)/DECIMAL_64, 1:c2:decimal(7,2)/DECIMAL_64, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:decimal(7,2)) + predicateExpression: SelectColumnIsNotNull(col 3:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(7,2)/DECIMAL_64) -> 3:decimal(7,2)) predicate: c1 is not null (type: boolean) Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -2677,8 +2663,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2686,9 +2671,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: c1:decimal(7,2), c2:decimal(7,2) + dataColumns: c1:decimal(7,2)/DECIMAL_64, 
c2:decimal(7,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(7,2)] Map 4 Map Operator Tree: TableScan @@ -2696,12 +2681,12 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:c1:decimal(7,2), 1:c2:decimal(7,2), 2:ROW__ID:struct] + vectorizationSchemaColumns: [0:c1:decimal(7,2)/DECIMAL_64, 1:c2:decimal(7,2)/DECIMAL_64, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:decimal(7,2)) + predicateExpression: SelectColumnIsNotNull(col 3:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 0:decimal(7,2)/DECIMAL_64) -> 3:decimal(7,2)) predicate: c1 is not null (type: boolean) Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE Select Operator @@ -2730,8 +2715,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2739,9 +2723,9 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 2 includeColumns: [0, 1] - dataColumns: c1:decimal(7,2), c2:decimal(7,2) + dataColumns: c1:decimal(7,2)/DECIMAL_64, c2:decimal(7,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [] + scratchColumnTypeNames: [decimal(7,2)] Reducer 2 Execution mode: llap Reduce Operator Tree: diff --git a/ql/src/test/results/clientpositive/llap/vector_ptf_1.q.out b/ql/src/test/results/clientpositive/llap/vector_ptf_1.q.out index 568549d86a..ef4934e4cb 100644 --- a/ql/src/test/results/clientpositive/llap/vector_ptf_1.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_ptf_1.q.out @@ -111,8 +111,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_reduce_groupby_duplicate_cols.q.out b/ql/src/test/results/clientpositive/llap/vector_reduce_groupby_duplicate_cols.q.out index 1ed694d5e6..bda96daf1d 100644 --- a/ql/src/test/results/clientpositive/llap/vector_reduce_groupby_duplicate_cols.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_reduce_groupby_duplicate_cols.q.out @@ -128,8 +128,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_retry_failure.q.out b/ql/src/test/results/clientpositive/llap/vector_retry_failure.q.out index 64e158e086..c2342b2b68 100644 --- a/ql/src/test/results/clientpositive/llap/vector_retry_failure.q.out +++ 
b/ql/src/test/results/clientpositive/llap/vector_retry_failure.q.out @@ -80,8 +80,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing.q.out index 428ee8dc08..a5d6167fba 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing.q.out @@ -56,8 +56,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -259,8 +258,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -467,8 +465,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -646,8 +643,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -841,8 +837,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1043,8 +1038,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1251,8 +1245,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + 
featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1296,8 +1289,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1537,8 +1529,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1697,8 +1688,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1874,8 +1864,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2053,8 +2042,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2244,8 +2232,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2411,8 +2398,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2578,8 +2564,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2798,8 +2783,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: 
hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3090,8 +3074,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3369,8 +3352,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3622,8 +3604,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3836,8 +3817,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4061,8 +4041,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -4258,8 +4237,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -4526,8 +4504,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4808,8 +4785,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] 
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -5323,8 +5299,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -6080,8 +6055,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6263,8 +6237,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -6428,8 +6401,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -6585,8 +6557,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -6748,8 +6719,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -6921,8 +6891,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -7088,8 +7057,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -7265,8 +7233,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true 
inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -7446,8 +7413,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -7626,8 +7592,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -7824,8 +7789,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -8020,8 +7984,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -8244,8 +8207,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -8485,8 +8447,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -8674,8 +8635,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -8829,8 +8789,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat 
allNative: true usesVectorUDFAdaptor: false @@ -9017,8 +8976,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -9172,8 +9130,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -9331,8 +9288,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -9564,8 +9520,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -9705,8 +9660,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -9854,8 +9808,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_expressions.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_expressions.q.out index 148f82b478..2bb7730580 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_expressions.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_expressions.q.out @@ -102,8 +102,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -296,8 +295,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - 
featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -438,7 +436,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: t (type: tinyint), bo (type: boolean), s (type: string), si (type: smallint), f (type: float) sort order: ++++- @@ -457,8 +455,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -466,7 +463,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [0, 1, 4, 6, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -662,7 +659,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: si (type: smallint), i (type: int), s (type: string) sort order: +++ @@ -681,8 +678,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -690,7 +686,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 2, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -886,7 +882,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 204 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 
8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: b (type: bigint), si (type: smallint), s (type: string), d (type: double) sort order: ++++ @@ -905,8 +901,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -914,7 +909,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 3, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1110,7 +1105,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: f (type: float), b (type: bigint) sort order: ++ @@ -1130,8 +1125,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1139,7 +1133,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [3, 4, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1354,8 +1348,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1533,8 +1526,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: 
org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1680,7 +1672,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 228 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: ts (type: timestamp), i (type: int) sort order: ++ @@ -1700,8 +1692,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1709,7 +1700,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 7, 8] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1944,8 +1935,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_gby.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_gby.q.out index d87e96f407..993ea618c6 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_gby.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_gby.q.out @@ -70,8 +70,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -122,8 +121,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out index 8dcb900911..493d4042aa 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out @@ -74,8 +74,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: 
hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -317,8 +316,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -554,8 +552,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -913,8 +910,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -965,8 +961,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_multipartitioning.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_multipartitioning.q.out index 01bcb69434..1a06f0898e 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_multipartitioning.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_multipartitioning.q.out @@ -74,7 +74,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: s (type: string), si (type: smallint) sort order: ++ @@ -94,8 +94,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -103,7 +102,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 3, 7] - dataColumns: t:tinyint, si:smallint, i:int, 
b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -10242,7 +10241,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 344 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -10269,8 +10268,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -10278,7 +10276,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [3, 7, 8, 9] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -10520,7 +10518,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -10546,8 +10544,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -10555,7 +10552,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 2, 4, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -10787,7 +10784,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 
4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -10814,8 +10811,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -10823,7 +10819,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 6, 7, 10] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -11060,7 +11056,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -11086,8 +11082,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -11095,7 +11090,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 4, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -11340,7 +11335,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 304 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -11367,8 +11362,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true 
inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -11376,7 +11370,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 4, 7, 9] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_navfn.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_navfn.q.out index 5466297b38..42e9694fab 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_navfn.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_navfn.q.out @@ -100,8 +100,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -226,7 +225,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 304 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: d (type: double), dec (type: decimal(4,2)) sort order: ++ @@ -246,8 +245,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -255,7 +253,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [5, 7, 9] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -485,7 +483,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 340 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 
11:ROW__ID:struct] Reduce Output Operator key expressions: bin (type: binary), d (type: double), i (type: int) sort order: ++- @@ -505,8 +503,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -514,7 +511,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 5, 7, 10] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -710,7 +707,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 300 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: i (type: int), s (type: string), dec (type: decimal(4,2)) sort order: +++ @@ -729,8 +726,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -738,7 +734,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 7, 9] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -934,7 +930,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: d (type: double), f (type: float) sort order: ++ @@ -954,8 +950,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true 
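Several of the Reduce Output Operators above sort on a decimal(4,2) key. One reason keeping the DECIMAL_64 form end-to-end pays off: at a fixed scale, the unscaled longs order exactly like the decimals they encode, so key comparisons stay primitive. A self-contained illustration in plain Java, with no Hive types involved:

    import java.math.BigDecimal;
    import java.util.Arrays;

    public class ScaledLongOrder {
      public static void main(String[] args) {
        // decimal(4,2) values 12.34, -0.50, 99.99 as unscaled longs, scale 2.
        long[] unscaled = {1234L, -50L, 9999L};
        Arrays.sort(unscaled); // primitive long sort, no object comparisons

        for (long u : unscaled) {
          // Prints -0.50, 12.34, 99.99: the same order as sorting the
          // decimal values themselves.
          System.out.println(BigDecimal.valueOf(u, 2));
        }
      }
    }

This equivalence only holds when both operands share the same scale, which is exactly what a fixed decimal(p,s) column type guarantees.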
usesVectorUDFAdaptor: false @@ -963,7 +958,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [0, 4, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1193,7 +1188,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: bo (type: boolean), s (type: string) sort order: ++ @@ -1212,8 +1207,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1221,7 +1215,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [6, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1418,7 +1412,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -1446,8 +1440,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1455,7 +1448,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [0, 2, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [bigint, bigint] Reducer 2 @@ -1639,8 +1632,7 @@ STAGE PLANS: enabled: true 
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1812,8 +1804,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1985,8 +1976,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2158,8 +2148,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_order_null.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_order_null.q.out index f950c4c873..91b52e7d01 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_order_null.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_order_null.q.out @@ -82,7 +82,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: i (type: int), s (type: string), b (type: bigint) sort order: +++ @@ -101,8 +101,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -110,7 +109,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 3, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -215,7 +214,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data 
size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: d (type: double), s (type: string), f (type: float) sort order: ++- @@ -234,8 +233,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -243,7 +241,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [4, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -348,7 +346,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 228 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: ts (type: timestamp), f (type: float) sort order: ++ @@ -368,8 +366,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -377,7 +374,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [4, 7, 8] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -482,7 +479,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce 
Output Operator key expressions: t (type: tinyint), s (type: string), d (type: double) sort order: ++- @@ -501,8 +498,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -510,7 +506,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [0, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -615,7 +611,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 228 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: ts (type: timestamp), s (type: string) sort order: ++ @@ -635,8 +631,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -644,7 +639,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 7, 8] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -785,7 +780,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: s (type: string), i (type: int) sort order: +- @@ -805,8 +800,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -814,7 
+808,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -950,7 +944,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: s (type: string), i (type: int) sort order: +- @@ -970,8 +964,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -979,7 +972,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1115,7 +1108,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: s (type: string), i (type: int) sort order: ++ @@ -1135,8 +1128,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1144,7 +1136,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 diff --git 
a/ql/src/test/results/clientpositive/llap/vector_windowing_range_multiorder.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_range_multiorder.q.out index 3a1b9c5ecb..782bd9be2f 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_range_multiorder.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_range_multiorder.q.out @@ -74,7 +74,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: si (type: smallint), i (type: int), b (type: bigint) sort order: +++ @@ -94,8 +94,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -103,7 +102,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [0, 1, 2, 3] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -333,7 +332,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: si (type: smallint), bo (type: boolean), i (type: int), f (type: float) sort order: +++- @@ -352,8 +351,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -361,7 +359,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 2, 4, 6] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -556,7 +554,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: 
NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: si (type: smallint), bo (type: boolean), i (type: int), f (type: float) sort order: +++- @@ -575,8 +573,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -584,7 +581,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 2, 4, 6] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -779,7 +776,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: s (type: string) sort order: + @@ -798,8 +795,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -807,7 +803,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 2, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -10929,7 +10925,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: s (type: string), 
si (type: smallint), i (type: int) sort order: +++ @@ -10948,8 +10944,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -10957,7 +10952,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 2, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -11187,7 +11182,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: s (type: string), si (type: smallint), i (type: int) sort order: +++ @@ -11206,8 +11201,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -11215,7 +11209,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 2, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -11445,7 +11439,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: s (type: string), si (type: smallint), i (type: int) sort order: ++- @@ -11464,8 +11458,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ 
-11473,7 +11466,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 2, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -11703,7 +11696,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: si (type: smallint), bo (type: boolean), i (type: int), f (type: float) sort order: +++- @@ -11722,8 +11715,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -11731,7 +11723,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [1, 2, 4, 6] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -11961,7 +11953,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: i (type: int), bo (type: boolean), b (type: bigint) sort order: +++ @@ -11980,8 +11972,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -11989,7 +11980,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 3, 6] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ 
-12185,7 +12176,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: i (type: int), CAST( s AS CHAR(12)) (type: char(12)) sort order: ++ @@ -12206,8 +12197,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -12215,7 +12205,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [string] Reducer 2 @@ -12411,7 +12401,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: i (type: int), CAST( s AS varchar(12)) (type: varchar(12)) sort order: ++ @@ -12432,8 +12422,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -12441,7 +12430,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [string] Reducer 2 diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_rank.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_rank.q.out index 2bf3b070bc..ff7cf6ca81 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_rank.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_rank.q.out @@ -74,7 +74,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE TableScan 
Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: f (type: float), t (type: tinyint) sort order: ++ @@ -94,8 +94,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -103,7 +102,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [0, 4, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -334,7 +333,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 228 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: ts (type: timestamp), i (type: int), s (type: string) sort order: ++- @@ -353,8 +352,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -362,7 +360,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 7, 8] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -558,7 +556,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: bo (type: boolean), b (type: bigint), s 
(type: string) sort order: +++ @@ -577,8 +575,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -586,7 +583,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [3, 6, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -782,7 +779,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 300 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: dec (type: decimal(4,2)), f (type: float) sort order: ++ @@ -802,8 +799,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -811,7 +807,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [4, 7, 9] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1028,7 +1024,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -1062,8 +1058,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1071,7 +1066,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [3, 8, 9] - 
dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Map 4 @@ -1081,7 +1076,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -1114,8 +1109,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1123,7 +1117,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [3] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1329,7 +1323,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -1363,8 +1357,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1372,7 +1365,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [3, 8, 9] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Map 4 @@ -1382,7 +1375,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 
4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -1415,8 +1408,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1424,7 +1416,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [3] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1632,7 +1624,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -1666,8 +1658,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1675,7 +1666,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [0, 3, 8, 9] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Map 4 @@ -1685,7 +1676,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -1718,8 +1709,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - 
vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1727,7 +1717,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [3] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_streaming.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_streaming.q.out index e1203912b0..55899efcd6 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_streaming.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_streaming.q.out @@ -95,8 +95,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -242,8 +241,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -425,7 +423,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(4,2)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -450,8 +448,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -459,7 +456,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [0, 4] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(4,2)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -853,8 +850,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + 
inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_windowspec.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_windowspec.q.out index e3d52d217b..93b8655bff 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_windowspec.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_windowspec.q.out @@ -74,7 +74,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: i (type: int), s (type: string), b (type: bigint) sort order: +++ @@ -93,8 +93,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -102,7 +101,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 3, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -297,7 +296,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: d (type: double), s (type: string), f (type: float) sort order: +++ @@ -316,8 +315,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -325,7 +323,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [4, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary 
partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -520,7 +518,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 228 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: ts (type: timestamp), f (type: float) sort order: ++ @@ -540,8 +538,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -549,7 +546,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [4, 7, 8] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -744,7 +741,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 228 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: ts (type: timestamp), s (type: string), f (type: float) sort order: +++ @@ -763,8 +760,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -772,7 +768,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [4, 7, 8] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -967,7 +963,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 
5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: t (type: tinyint), s (type: string), d (type: double) sort order: ++- @@ -986,8 +982,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -995,7 +990,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [0, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1190,7 +1185,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 228 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: ts (type: timestamp), s (type: string) sort order: ++ @@ -1210,8 +1205,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1219,7 +1213,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 7, 8] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1449,7 +1443,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: ts (type: timestamp), f (type: float) sort order: ++ @@ -1468,8 +1462,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + 
featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1477,7 +1470,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [4, 8] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1707,7 +1700,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: ts (type: timestamp), f (type: float) sort order: ++ @@ -1726,8 +1719,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1735,7 +1727,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [4, 8] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -1930,7 +1922,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: s (type: string), i (type: int) sort order: ++ @@ -1950,8 +1942,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1959,7 +1950,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, 
bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -2097,7 +2088,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: s (type: string), i (type: int) sort order: ++ @@ -2117,8 +2108,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2126,7 +2116,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 @@ -2264,7 +2254,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0), 10:bin:binary, 11:ROW__ID:struct] + vectorizationSchemaColumns: [0:t:tinyint, 1:si:smallint, 2:i:int, 3:b:bigint, 4:f:float, 5:d:double, 6:bo:boolean, 7:s:string, 8:ts:timestamp, 9:dec:decimal(10,0)/DECIMAL_64, 10:bin:binary, 11:ROW__ID:struct] Reduce Output Operator key expressions: s (type: string), i (type: int) sort order: ++ @@ -2284,8 +2274,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2293,7 +2282,7 @@ STAGE PLANS: rowBatchContext: dataColumnCount: 11 includeColumns: [2, 5, 7] - dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0), bin:binary + dataColumns: t:tinyint, si:smallint, i:int, b:bigint, f:float, d:double, bo:boolean, s:string, ts:timestamp, dec:decimal(10,0)/DECIMAL_64, bin:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reducer 2 diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_windowspec4.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_windowspec4.q.out index 3cebb04147..78df4409f3 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_windowspec4.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_windowspec4.q.out @@ -92,8 +92,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: 
hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vectorized_bucketmapjoin1.q.out b/ql/src/test/results/clientpositive/llap/vectorized_bucketmapjoin1.q.out index 61c5051bb9..dfe5279dca 100644 --- a/ql/src/test/results/clientpositive/llap/vectorized_bucketmapjoin1.q.out +++ b/ql/src/test/results/clientpositive/llap/vectorized_bucketmapjoin1.q.out @@ -145,8 +145,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -180,8 +180,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -280,8 +280,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -415,8 +415,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -451,8 +451,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false diff --git a/ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out b/ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out index d3ab509ce7..8ee96d3367 100644 --- a/ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out +++ b/ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out @@ -82,8 +82,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -261,8 +260,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: 
hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -305,8 +303,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -419,8 +417,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -448,8 +445,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -578,8 +575,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -622,8 +618,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -666,8 +662,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -804,8 +800,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -833,8 +828,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -862,8 +857,8 @@ STAGE PLANS: Map 
Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1006,8 +1001,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1065,8 +1059,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1179,8 +1173,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1208,8 +1201,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1333,8 +1326,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1377,8 +1369,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1491,8 +1483,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1520,8 +1511,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false 
@@ -1643,8 +1634,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1687,8 +1677,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1801,8 +1791,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1845,8 +1834,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -1959,8 +1948,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -1988,8 +1976,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2102,8 +2090,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2131,8 +2118,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2258,8 +2245,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: 
org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2302,8 +2288,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2437,8 +2423,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2577,8 +2562,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2605,8 +2589,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2723,8 +2707,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2782,8 +2765,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -2896,8 +2879,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -2940,8 +2922,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3052,8 +3034,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + 
featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3078,8 +3060,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3172,8 +3153,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3216,8 +3196,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3314,8 +3294,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3358,8 +3337,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3402,8 +3381,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3537,8 +3516,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3594,8 +3573,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3722,8 +3701,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled 
because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -3752,8 +3730,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3782,8 +3759,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -3982,8 +3958,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -4012,8 +3987,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4042,8 +4016,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4247,8 +4220,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4277,8 +4249,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4305,8 +4276,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4335,8 +4305,7 @@ STAGE PLANS: enabled: true 
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4597,8 +4566,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4641,8 +4609,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4773,8 +4741,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4817,8 +4784,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4861,8 +4828,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -4983,8 +4950,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5042,8 +5008,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5161,8 +5127,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false 
usesVectorUDFAdaptor: false @@ -5205,8 +5170,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5313,8 +5278,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5357,8 +5321,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5465,8 +5429,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5509,8 +5472,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5625,8 +5588,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5779,8 +5741,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5823,8 +5784,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5914,8 +5875,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -5940,8 +5901,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -6015,8 +5975,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: true usesVectorUDFAdaptor: false @@ -6057,8 +6016,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6157,8 +6116,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6201,8 +6159,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6245,8 +6203,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6355,8 +6313,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: true usesVectorUDFAdaptor: false @@ -6425,8 +6383,8 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFormatFeatureSupport: [] - featureSupportInUse: [] + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6533,8 +6491,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + 
featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6563,8 +6520,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false @@ -6593,8 +6549,7 @@ STAGE PLANS: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] - vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] - featureSupportInUse: [] + featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat allNative: false usesVectorUDFAdaptor: false -- 2.15.1 (Apple Git-101)
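Note on the plan changes above: the "/DECIMAL_64" suffix on columns such as decimal(4,2) and decimal(10,0), and the "featureSupportInUse: [DECIMAL_64]" entries, mean those decimal columns are now carried through vectorized execution as unscaled 64-bit longs rather than full HiveDecimal objects; the representation only applies to decimals whose precision fits in a signed long (precision <= 18). What follows is a minimal, self-contained Java sketch of that scaled-long encoding, for illustration only; it deliberately uses plain java.math.BigDecimal and does not reproduce Hive's actual Decimal64ColumnVector API.

    import java.math.BigDecimal;

    // Illustration of the decimal64 idea: store a fixed-scale decimal as an
    // unscaled long. A decimal(4,2) value such as 12.34 becomes the long 1234.
    public class Decimal64Sketch {
      static final int SCALE = 2;           // matches decimal(4,2) in the plans above
      static final int MAX_PRECISION = 18;  // widest decimal a signed 64-bit long can hold

      static long encode(BigDecimal value) {
        BigDecimal scaled = value.setScale(SCALE);      // throws if extra rounding is needed
        if (scaled.precision() > MAX_PRECISION) {
          throw new IllegalArgumentException("too wide for decimal64: " + value);
        }
        return scaled.unscaledValue().longValueExact(); // the stored representation
      }

      static BigDecimal decode(long unscaled) {
        return BigDecimal.valueOf(unscaled, SCALE);     // reattach the scale
      }

      public static void main(String[] args) {
        long stored = encode(new BigDecimal("12.34"));
        System.out.println(stored + " -> " + decode(stored)); // prints: 1234 -> 12.34
      }
    }

Arithmetic on such values can proceed with plain long operations (plus overflow checks), which is what makes the DECIMAL_64 paths shown in these plans cheaper than going through HiveDecimal.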