diff --git itests/src/test/resources/testconfiguration.properties itests/src/test/resources/testconfiguration.properties index 1017249..4b12607 100644 --- itests/src/test/resources/testconfiguration.properties +++ itests/src/test/resources/testconfiguration.properties @@ -598,34 +598,62 @@ minillaplocal.query.files=\ quotedid_smb.q,\ resourceplan.q,\ sample10.q,\ + schema_evol_orc_acid_part_llap_io.q,\ schema_evol_orc_acid_part.q,\ + schema_evol_orc_acid_part_update_llap_io.q,\ schema_evol_orc_acid_part_update.q,\ + schema_evol_orc_acid_table_llap_io.q,\ schema_evol_orc_acid_table.q,\ + schema_evol_orc_acid_table_update_llap_io.q,\ schema_evol_orc_acid_table_update.q,\ + schema_evol_orc_acidvec_part_llap_io.q,\ schema_evol_orc_acidvec_part.q,\ + schema_evol_orc_acidvec_part_update_llap_io.q,\ schema_evol_orc_acidvec_part_update.q,\ + schema_evol_orc_acidvec_table_llap_io.q,\ schema_evol_orc_acidvec_table.q,\ + schema_evol_orc_acidvec_table_update_llap_io.q,\ schema_evol_orc_acidvec_table_update.q,\ + schema_evol_orc_nonvec_part_llap_io.q,\ schema_evol_orc_nonvec_part.q,\ + schema_evol_orc_nonvec_part_all_complex_llap_io.q,\ schema_evol_orc_nonvec_part_all_complex.q,\ + schema_evol_orc_nonvec_part_all_primitive_llap_io.q,\ schema_evol_orc_nonvec_part_all_primitive.q,\ + schema_evol_orc_nonvec_table_llap_io.q,\ schema_evol_orc_nonvec_table.q,\ + schema_evol_orc_vec_part_llap_io.q,\ schema_evol_orc_vec_part.q,\ + schema_evol_orc_vec_part_all_complex_llap_io.q,\ schema_evol_orc_vec_part_all_complex.q,\ + schema_evol_orc_vec_part_all_primitive_llap_io.q,\ schema_evol_orc_vec_part_all_primitive.q,\ + schema_evol_orc_vec_table_llap_io.q,\ schema_evol_orc_vec_table.q,\ schema_evol_stats.q,\ + schema_evol_text_nonvec_part_llap_io.q,\ schema_evol_text_nonvec_part.q,\ + schema_evol_text_nonvec_part_all_complex_llap_io.q,\ schema_evol_text_nonvec_part_all_complex.q,\ + schema_evol_text_nonvec_part_all_primitive_llap_io.q,\ schema_evol_text_nonvec_part_all_primitive.q,\ + schema_evol_text_nonvec_table_llap_io.q,\ schema_evol_text_nonvec_table.q,\ + schema_evol_text_vec_part_llap_io.q,\ schema_evol_text_vec_part.q,\ + schema_evol_text_vec_part_all_complex_llap_io.q,\ schema_evol_text_vec_part_all_complex.q,\ + schema_evol_text_vec_part_all_primitive_llap_io.q,\ schema_evol_text_vec_part_all_primitive.q,\ + schema_evol_text_vec_table_llap_io.q,\ schema_evol_text_vec_table.q,\ + schema_evol_text_vecrow_part_llap_io.q,\ schema_evol_text_vecrow_part.q,\ + schema_evol_text_vecrow_part_all_complex_llap_io.q,\ schema_evol_text_vecrow_part_all_complex.q,\ + schema_evol_text_vecrow_part_all_primitive_llap_io.q,\ schema_evol_text_vecrow_part_all_primitive.q,\ + schema_evol_text_vecrow_table_llap_io.q,\ schema_evol_text_vecrow_table.q,\ selectDistinctStar.q,\ semijoin.q,\ @@ -697,6 +725,7 @@ minillaplocal.query.files=\ vector_join30.q,\ vector_join_filters.q,\ vector_leftsemi_mapjoin.q,\ + vector_llap_text_1.q,\ vector_mapjoin_reduce.q,\ vector_number_compare_projection.q,\ vector_partitioned_date_time.q,\ diff --git llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java index 4336220..bb319f0 100644 --- llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java +++ llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java @@ -110,7 +110,7 @@ } // For non-vectorized operator case, wrap the reader if possible. 
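// (getIsVectorized, below, now tests only the plan-level vector mode; it no longer also requires the VectorizedInputFileFormat flag, since LLAP can deliver VectorizedRowBatches for non-vectorized input formats as well.)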
RecordReader result = rr; - if (!Utilities.getUseVectorizedInputFileFormat(job)) { + if (!Utilities.getIsVectorized(job)) { result = wrapLlapReader(includedCols, rr, split); if (result == null) { // Cannot wrap a reader for non-vectorized pipeline. diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java index 2e1fd37..d7b3e4b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java @@ -3684,12 +3684,12 @@ private static void resetUmaskInConf(Configuration conf, boolean unsetUmask, Str /** * Returns true if a plan is both configured for vectorized execution - * and the node is vectorized and the Input File Format is marked VectorizedInputFileFormat. + * and the node is vectorized. * * The plan may be configured for vectorization * but vectorization disallowed, e.g. for FetchOperator execution. */ - public static boolean getUseVectorizedInputFileFormat(Configuration conf) { + public static boolean getIsVectorized(Configuration conf) { if (conf.get(VECTOR_MODE) != null) { // this code path is necessary, because with HS2 and client // side split generation we end up not finding the map work. @@ -3697,13 +3697,12 @@ public static boolean getUseVectorizedInputFileFormat(Configuration conf) { // generation is multi-threaded - HS2 plan cache uses thread // locals). return - conf.getBoolean(VECTOR_MODE, false) && - conf.getBoolean(USE_VECTORIZED_INPUT_FILE_FORMAT, false); + conf.getBoolean(VECTOR_MODE, false); } else { if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED) && Utilities.getPlanPath(conf) != null) { MapWork mapWork = Utilities.getMapWork(conf); - return (mapWork.getVectorMode() && mapWork.getUseVectorizedInputFileFormat()); + return mapWork.getVectorMode(); } else { return false; } @@ -3711,10 +3710,9 @@ public static boolean getUseVectorizedInputFileFormat(Configuration conf) { } - public static boolean getUseVectorizedInputFileFormat(Configuration conf, MapWork mapWork) { + public static boolean getIsVectorized(Configuration conf, MapWork mapWork) { return HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED) && - mapWork.getVectorMode() && - mapWork.getUseVectorizedInputFileFormat(); + mapWork.getVectorMode(); } /** diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java index cd12a0b..6f1346d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java @@ -620,6 +620,25 @@ private void setRowIdentiferToNull(VectorizedRowBatch batch) { } /* + * Flush a partially full deserializerBatch. + * @return true if the operator tree is not done yet. + */ + private boolean flushDeserializerBatch() throws HiveException { + if (deserializerBatch.size > 0) { + + batchCounter++; + oneRootOperator.process(deserializerBatch, 0); + deserializerBatch.reset(); + if (oneRootOperator.getDone()) { + setDone(true); + return false; + } + + } + return true; + } + + /* * Setup the context for reading from the next partition file.
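 * Any partially full deserializerBatch left over from the previous partition is flushed first (see flushDeserializerBatch above), because the repeating partition column values are about to change.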
*/ private void setupPartitionContextVars(String nominalPath) throws HiveException { @@ -672,20 +691,14 @@ private void setupPartitionContextVars(String nominalPath) throws HiveException currentReadType == VectorMapOperatorReadType.VECTOR_DESERIALIZE || currentReadType == VectorMapOperatorReadType.ROW_DESERIALIZE); - if (deserializerBatch.size > 0) { - - /* - * Clear out any rows in the batch from previous partition since we are going to change - * the repeating partition column values. - */ - batchCounter++; - oneRootOperator.process(deserializerBatch, 0); - deserializerBatch.reset(); - if (oneRootOperator.getDone()) { - setDone(true); - return; - } + /* + * Clear out any rows in the batch from the previous partition since we are going to change + * the repeating partition column values. + */ + if (!flushDeserializerBatch()) { + // Operator tree is now done. + return; } /* @@ -773,6 +786,38 @@ public Deserializer getCurrentDeserializer() { return null; } + /* + * Deliver a vector batch to the operator tree. + * + * The Vectorized Input File Format reader has already set the partition column + * values, reset and filled in the batch, etc. + * + * We pass the VectorizedRowBatch through here. + * + * @return true if the operator tree is not done yet. + */ + private boolean deliverVectorizedRowBatch(Writable value) throws HiveException { + + batchCounter++; + if (value != null) { + VectorizedRowBatch batch = (VectorizedRowBatch) value; + numRows += batch.size; + if (hasRowIdentifier) { + if (batchContext.getRecordIdColumnVector() == null) { + setRowIdentiferToNull(batch); + } else { + batch.cols[rowIdentifierColumnNum] = batchContext.getRecordIdColumnVector(); + } + } + } + oneRootOperator.process(value, 0); + if (oneRootOperator.getDone()) { + setDone(true); + return false; + } + return true; + } + @Override public void process(Writable value) throws HiveException { @@ -798,30 +843,33 @@ public void process(Writable value) throws HiveException { try { if (currentReadType == VectorMapOperatorReadType.VECTORIZED_INPUT_FILE_FORMAT) { + if (!deliverVectorizedRowBatch(value)) { + + // Operator tree is now done. + return; + } + + } else if (value instanceof VectorizedRowBatch) { + /* - * The Vectorized Input File Format reader has already set the partition column - * values, reset and filled in the batch, etc. - * - * We pass the VectorizedRowBatch through here. + * This case can happen with LLAP. If it is able to deserialize and cache data from the + * input format, it will deliver that cached data to us as VRBs. */ - batchCounter++; - if (value != null) { - VectorizedRowBatch batch = (VectorizedRowBatch) value; - numRows += batch.size; - if (hasRowIdentifier) { - if (batchContext.getRecordIdColumnVector() == null) { - setRowIdentiferToNull(batch); - } else { - batch.cols[rowIdentifierColumnNum] = batchContext.getRecordIdColumnVector(); - } - } - } - oneRootOperator.process(value, 0); - if (oneRootOperator.getDone()) { - setDone(true); + + /* + * Clear out any rows we may have processed in row-mode for the current partition. + */ + if (!flushDeserializerBatch()) { + + // Operator tree is now done. return; } + if (!deliverVectorizedRowBatch(value)) { + + // Operator tree is now done.
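+ // (deliverVectorizedRowBatch has already called setDone(true), so no further batches will be delivered.)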
+ return; + } } else { /* diff --git ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java index 2687d33..7d3ff36 100755 --- ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java +++ ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java @@ -222,7 +222,7 @@ public void configure(JobConf job) { String ifName = inputFormat.getClass().getCanonicalName(); boolean isSupported = inputFormat instanceof LlapWrappableInputFormatInterface; boolean isCacheOnly = inputFormat instanceof LlapCacheOnlyInputFormatInterface; - boolean isVectorized = Utilities.getUseVectorizedInputFileFormat(conf); + boolean isVectorized = Utilities.getIsVectorized(conf); if (!isVectorized) { // Pretend it's vectorized if the non-vector wrapped is enabled. isVectorized = HiveConf.getBoolVar(conf, ConfVars.LLAP_IO_NONVECTOR_WRAPPER_ENABLED) diff --git ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java index ce5dae0..6a372a3 100644 --- ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java +++ ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java @@ -71,7 +71,7 @@ public DummyInputSplit(String path) { private boolean addPartitionCols = true; public NullRowsRecordReader(Configuration conf, InputSplit split) throws IOException { - boolean isVectorMode = Utilities.getUseVectorizedInputFileFormat(conf); + boolean isVectorMode = Utilities.getIsVectorized(conf); if (LOG.isDebugEnabled()) { LOG.debug(getClass().getSimpleName() + " in " + (isVectorMode ? "" : "non-") + "vector mode"); diff --git ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java index e92bcb4..ff2cc04 100644 --- ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java +++ ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java @@ -506,7 +506,7 @@ public boolean validateInput(FileSystem fs, HiveConf conf, List files ) throws IOException { - if (Utilities.getUseVectorizedInputFileFormat(conf)) { + if (Utilities.getIsVectorized(conf)) { return new VectorizedOrcInputFormat().validateInput(fs, conf, files); } @@ -1890,7 +1890,7 @@ private static void scheduleSplits(ETLSplitStrategy splitStrategy, Context conte getRecordReader(InputSplit inputSplit, JobConf conf, Reporter reporter) throws IOException { //CombineHiveInputFormat may produce FileSplit that is not OrcSplit - boolean vectorMode = Utilities.getUseVectorizedInputFileFormat(conf); + boolean vectorMode = Utilities.getIsVectorized(conf); boolean isAcidRead = isAcidRead(conf, inputSplit); if (!isAcidRead) { if (vectorMode) { diff --git ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java index ab8c0ca..10e6a18 100644 --- ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java +++ ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java @@ -74,7 +74,7 @@ protected MapredParquetInputFormat(final ParquetInputFormat input final org.apache.hadoop.mapred.Reporter reporter ) throws IOException { try { - if (Utilities.getUseVectorizedInputFileFormat(job)) { + if (Utilities.getIsVectorized(job)) { if (LOG.isDebugEnabled()) { LOG.debug("Using vectorized record reader"); } diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java 
ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java index 215aaad..959ce53 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java @@ -27,6 +27,7 @@ import java.util.Collection; import java.util.HashMap; import java.util.HashSet; +import java.util.Hashtable; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -35,6 +36,7 @@ import java.util.Properties; import java.util.Set; import java.util.Stack; +import java.util.TreeMap; import java.util.TreeSet; import java.util.regex.Pattern; @@ -1111,6 +1113,25 @@ private void handleSupport( } /* + * Add a vector partition descriptor to the partition descriptor, de-duplicating equal objects. + * + * If the same vector partition descriptor has already been allocated, share that object. + */ + private void addVectorPartitionDesc(PartitionDesc pd, VectorPartitionDesc vpd, + Map vectorPartitionDescMap) { + + VectorPartitionDesc existingEle = vectorPartitionDescMap.get(vpd); + if (existingEle != null) { + + // Use the object we already have. + vpd = existingEle; + } else { + vectorPartitionDescMap.put(vpd, vpd); + } + pd.setVectorPartitionDesc(vpd); + } + + /* * There are 3 modes of reading for vectorization: * * 1) One for the Vectorized Input File Format which returns VectorizedRowBatch as the row. @@ -1127,6 +1148,7 @@ private void handleSupport( private boolean verifyAndSetVectorPartDesc( PartitionDesc pd, boolean isAcidTable, Set inputFileFormatClassNameSet, + Map vectorPartitionDescMap, Set enabledConditionsMetSet, ArrayList enabledConditionsNotMetList, Set newSupportSet) { @@ -1154,9 +1176,11 @@ private boolean verifyAndSetVectorPartDesc( addVectorizedInputFileFormatSupport( newSupportSet, isInputFileFormatVectorized, inputFileFormatClass); - pd.setVectorPartitionDesc( + addVectorPartitionDesc( + pd, VectorPartitionDesc.createVectorizedInputFileFormat( - inputFileFormatClassName, Utilities.isInputFileFormatSelfDescribing(pd))); + inputFileFormatClassName, Utilities.isInputFileFormatSelfDescribing(pd)), + vectorPartitionDescMap); enabledConditionsMetSet.add(HiveConf.ConfVars.HIVE_VECTORIZATION_USE_VECTORIZED_INPUT_FILE_FORMAT.varname); return true; @@ -1173,9 +1197,11 @@ private boolean verifyAndSetVectorPartDesc( addVectorizedInputFileFormatSupport( newSupportSet, isInputFileFormatVectorized, inputFileFormatClass); - pd.setVectorPartitionDesc( + addVectorPartitionDesc( + pd, VectorPartitionDesc.createVectorizedInputFileFormat( - inputFileFormatClassName, Utilities.isInputFileFormatSelfDescribing(pd))); + inputFileFormatClassName, Utilities.isInputFileFormatSelfDescribing(pd)), + vectorPartitionDescMap); enabledConditionsMetSet.add( HiveConf.ConfVars.HIVE_VECTORIZATION_USE_VECTORIZED_INPUT_FILE_FORMAT.varname); @@ -1239,18 +1265,22 @@ private boolean verifyAndSetVectorPartDesc( // Add the support for read variations in Vectorized Text.
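// (These support entries are merged into newSupportSet alongside the VECTOR_DESERIALIZE descriptor recorded below, so EXPLAIN VECTORIZATION can report them for this input format.)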
newSupportSet.addAll(vectorDeserializeTextSupportSet); - pd.setVectorPartitionDesc( + addVectorPartitionDesc( + pd, VectorPartitionDesc.createVectorDeserialize( - inputFileFormatClassName, VectorDeserializeType.LAZY_SIMPLE)); + inputFileFormatClassName, VectorDeserializeType.LAZY_SIMPLE), + vectorPartitionDescMap); enabledConditionsMetSet.add(HiveConf.ConfVars.HIVE_VECTORIZATION_USE_VECTOR_DESERIALIZE.varname); return true; } } else if (isSequenceFormat) { - pd.setVectorPartitionDesc( + addVectorPartitionDesc( + pd, VectorPartitionDesc.createVectorDeserialize( - inputFileFormatClassName, VectorDeserializeType.LAZY_BINARY)); + inputFileFormatClassName, VectorDeserializeType.LAZY_BINARY), + vectorPartitionDescMap); enabledConditionsMetSet.add(HiveConf.ConfVars.HIVE_VECTORIZATION_USE_VECTOR_DESERIALIZE.varname); return true; @@ -1262,15 +1292,21 @@ private boolean verifyAndSetVectorPartDesc( // inspect-able Object[] row to a VectorizedRowBatch in the VectorMapOperator. if (useRowDeserialize) { - if (!isInputFormatExcluded(inputFileFormatClassName, rowDeserializeInputFormatExcludes)) { - pd.setVectorPartitionDesc( + boolean isRowDeserializeExcluded = + isInputFormatExcluded(inputFileFormatClassName, rowDeserializeInputFormatExcludes); + if (!isRowDeserializeExcluded && !isInputFileFormatVectorized) { + addVectorPartitionDesc( + pd, VectorPartitionDesc.createRowDeserialize( inputFileFormatClassName, Utilities.isInputFileFormatSelfDescribing(pd), - deserializerClassName)); - + deserializerClassName), + vectorPartitionDescMap); + enabledConditionsMetSet.add(HiveConf.ConfVars.HIVE_VECTORIZATION_USE_ROW_DESERIALIZE.varname); return true; + } else if (isInputFileFormatVectorized) { + enabledConditionsNotMetList.add("Row deserialization of vectorized input format not supported"); } else { enabledConditionsNotMetList.add(ConfVars.HIVE_VECTORIZATION_USE_ROW_DESERIALIZE.varname + " IS true AND " + ConfVars.HIVE_VECTORIZATION_ROW_DESERIALIZE_INPUTFORMAT_EXCLUDES.varname @@ -1368,7 +1404,9 @@ private boolean isInputFormatExcluded(String inputFileFormatClassName, Collectio LinkedHashMap pathToPartitionInfo = mapWork.getPathToPartitionInfo(); // Remember the input file formats we validated and why. - Set inputFileFormatClassNameSet = new HashSet(); + Set inputFileFormatClassNameSet = new TreeSet(); + Map vectorPartitionDescMap = + new LinkedHashMap(); Set enabledConditionsMetSet = new HashSet(); ArrayList enabledConditionsNotMetList = new ArrayList(); Set inputFormatSupportSet = new TreeSet(); @@ -1395,11 +1433,17 @@ private boolean isInputFormatExcluded(String inputFileFormatClassName, Collectio if (!verifyAndSetVectorPartDesc( partDesc, isAcidTable, inputFileFormatClassNameSet, + vectorPartitionDescMap, enabledConditionsMetSet, enabledConditionsNotMetList, newSupportSet)) { // Always set these so EXPLAIN can see. 
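// (Both lists show up in EXPLAIN VECTORIZATION: inputFileFormats at SUMMARY level, vectorPartitionDescs at DETAIL level; see MapExplainVectorization in MapWork. Because addVectorPartitionDesc de-duplicates, vectorPartitionDescList holds one entry per distinct descriptor, e.g. a single (VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE) entry no matter how many partitions share it.)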
- mapWork.setVectorizationInputFileFormatClassNameSet(inputFileFormatClassNameSet); + ArrayList inputFileFormatClassNameList = new ArrayList(); + inputFileFormatClassNameList.addAll(inputFileFormatClassNameSet); + mapWork.setVectorizationInputFileFormatClassNameList(inputFileFormatClassNameList); + ArrayList vectorPartitionDescList = new ArrayList(); + vectorPartitionDescList.addAll(vectorPartitionDescMap.keySet()); + mapWork.setVectorPartitionDescList(vectorPartitionDescList); mapWork.setVectorizationEnabledConditionsMet(new ArrayList(enabledConditionsMetSet)); mapWork.setVectorizationEnabledConditionsNotMet(enabledConditionsNotMetList); @@ -1516,7 +1560,12 @@ private boolean isInputFormatExcluded(String inputFileFormatClassName, Collectio vectorTaskColumnInfo.setInputFormatSupportSet(inputFormatSupportSet); // Always set these so EXPLAIN can see. - mapWork.setVectorizationInputFileFormatClassNameSet(inputFileFormatClassNameSet); + ArrayList inputFileFormatClassNameList = new ArrayList(); + inputFileFormatClassNameList.addAll(inputFileFormatClassNameSet); + mapWork.setVectorizationInputFileFormatClassNameList(inputFileFormatClassNameList); + ArrayList vectorPartitionDescList = new ArrayList(); + vectorPartitionDescList.addAll(vectorPartitionDescMap.keySet()); + mapWork.setVectorPartitionDescList(vectorPartitionDescList); mapWork.setVectorizationEnabledConditionsMet(new ArrayList(enabledConditionsMetSet)); mapWork.setVectorizationEnabledConditionsNotMet(enabledConditionsNotMetList); @@ -1624,7 +1673,7 @@ private boolean validateAndVectorizeMapWork(MapWork mapWork, VectorTaskColumnInf neededVirtualColumnSet = new HashSet(); mapWork.setVectorizationEnabled(true); - LOG.info("Vectorization is enabled for input format(s) " + mapWork.getVectorizationInputFileFormatClassNameSet().toString()); + LOG.info("Vectorization is enabled for input format(s) " + mapWork.getVectorizationInputFileFormatClassNameList().toString()); //-------------------------------------------------------------------------------------------- diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java index 92946f5..5b43c60 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java @@ -148,7 +148,8 @@ private VectorizerReason notEnabledInputFileFormatReason; - private Set vectorizationInputFileFormatClassNameSet; + private List vectorizationInputFileFormatClassNameList; + private List vectorPartitionDescList; private List vectorizationEnabledConditionsMet; private List vectorizationEnabledConditionsNotMet; @@ -268,7 +269,7 @@ public void deriveLlap(Configuration conf, boolean isExecDriver) { boolean canWrapAny = false, doCheckIfs = false; if (isLlapOn) { // We can wrap inputs if the execution is vectorized, or if we use a wrapper. - canWrapAny = Utilities.getUseVectorizedInputFileFormat(conf, this); + canWrapAny = Utilities.getIsVectorized(conf, this); // ExecDriver has no plan path, so we cannot derive VRB stuff for the wrapper. 
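// (When the plan is not vectorized, wrapping can still be enabled explicitly via LLAP_IO_NONVECTOR_WRAPPER_ENABLED, checked below.)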
if (!canWrapAny && !isExecDriver) { canWrapAny = HiveConf.getBoolVar(conf, ConfVars.LLAP_IO_NONVECTOR_WRAPPER_ENABLED); @@ -784,12 +785,20 @@ public VectorizerReason getNotEnabledInputFileFormatReason() { return notEnabledInputFileFormatReason; } - public void setVectorizationInputFileFormatClassNameSet(Set vectorizationInputFileFormatClassNameSet) { - this.vectorizationInputFileFormatClassNameSet = vectorizationInputFileFormatClassNameSet; + public void setVectorizationInputFileFormatClassNameList(List vectorizationInputFileFormatClassNameList) { + this.vectorizationInputFileFormatClassNameList = vectorizationInputFileFormatClassNameList; } - public Set getVectorizationInputFileFormatClassNameSet() { - return vectorizationInputFileFormatClassNameSet; + public List getVectorizationInputFileFormatClassNameList() { + return vectorizationInputFileFormatClassNameList; + } + + public void setVectorPartitionDescList(List vectorPartitionDescList) { + this.vectorPartitionDescList = vectorPartitionDescList; + } + + public List getVectorPartitionDescList() { + return vectorPartitionDescList; } public void setVectorizationEnabledConditionsMet(ArrayList vectorizationEnabledConditionsMet) { @@ -818,8 +827,13 @@ public MapExplainVectorization(MapWork mapWork) { } @Explain(vectorization = Vectorization.SUMMARY, displayName = "inputFileFormats", explainLevels = { Level.DEFAULT, Level.EXTENDED }) - public Set inputFileFormats() { - return mapWork.getVectorizationInputFileFormatClassNameSet(); + public String inputFileFormats() { + return mapWork.getVectorizationInputFileFormatClassNameList().toString(); + } + + @Explain(vectorization = Vectorization.DETAIL, displayName = "vectorPartitionDescs", explainLevels = { Level.DEFAULT, Level.EXTENDED }) + public String vectorPartitionDescs() { + return mapWork.getVectorPartitionDescList().toString(); } @Explain(vectorization = Vectorization.SUMMARY, displayName = "inputFormatFeatureSupport", explainLevels = { Level.DEFAULT, Level.EXTENDED }) diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/VectorPartitionDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/VectorPartitionDesc.java index 787d0c2..2c8904d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/VectorPartitionDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/VectorPartitionDesc.java @@ -227,24 +227,25 @@ public int getDataColumnCount() { @Override public String toString() { StringBuilder sb = new StringBuilder(); - sb.append("vector map operator read type "); + sb.append("("); sb.append(vectorMapOperatorReadType.name()); - sb.append(", input file format class name "); + sb.append(", "); sb.append(inputFileFormatClassName); switch (vectorMapOperatorReadType) { case VECTORIZED_INPUT_FILE_FORMAT: break; case VECTOR_DESERIALIZE: - sb.append(", deserialize type "); + sb.append(", "); sb.append(vectorDeserializeType.name()); break; case ROW_DESERIALIZE: - sb.append(", deserializer class name "); + sb.append(", "); sb.append(rowDeserializerClassName); break; default: throw new RuntimeException("Unexpected vector map operator read type " + vectorMapOperatorReadType.name()); } + sb.append(")"); return sb.toString(); } diff --git ql/src/test/queries/clientpositive/schema_evol_orc_acid_part_llap_io.q ql/src/test/queries/clientpositive/schema_evol_orc_acid_part_llap_io.q new file mode 100644 index 0000000..a526f57 --- /dev/null +++ ql/src/test/queries/clientpositive/schema_evol_orc_acid_part_llap_io.q @@ -0,0 +1,320 @@ +set hive.explain.user=false; +set hive.fetch.task.conversion=none; +set 
hive.mapred.mode=nonstrict; +set hive.cli.print.header=true; +set hive.support.concurrency=true; +set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager; +set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat; +SET hive.exec.schema.evolution=false; +SET hive.vectorized.use.vectorized.input.format=true; +SET hive.vectorized.use.vector.serde.deserialize=false; +SET hive.vectorized.use.row.serde.deserialize=false; +SET hive.vectorized.execution.enabled=false; +set hive.exec.dynamic.partition.mode=nonstrict; +set hive.metastore.disallow.incompatible.col.type.changes=true; +set hive.default.fileformat=orc; +set hive.llap.io.enabled=true; +set hive.llap.io.encode.enabled=true; + +-- SORT_QUERY_RESULTS +-- +-- FILE VARIATION: ORC, ACID Non-Vectorized, MapWork, Partitioned +-- *IMPORTANT NOTE* We set hive.exec.schema.evolution=false above since schema evolution is always used for ACID. +-- Also, we don't do regular EXPLAINs on ACID files because the transaction id causes Q file statistics differences... +-- Instead just one explain vectorization only detail +-- + +CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile; +load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data; + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE ADD COLUMNS +-- +-- +-- SUBSECTION: ALTER TABLE ADD COLUMNS: INT PERMUTE SELECT +-- +-- +CREATE TABLE part_add_int_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table part_add_int_permute_select partition(part=1) VALUES (1, 1111, 'new'); + +-- Table-Non-Cascade ADD COLUMNS ... +alter table part_add_int_permute_select add columns(c int); + +insert into table part_add_int_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333); + +-- SELECT permutation columns to make sure NULL defaulting works right +select insert_num,part,a,b from part_add_int_permute_select; +select insert_num,part,a,b,c from part_add_int_permute_select; +select insert_num,part,c from part_add_int_permute_select; + +drop table part_add_int_permute_select; + + +-- SUBSECTION: ALTER TABLE ADD COLUMNS: INT, STRING, PERMUTE SELECT +-- +-- +CREATE TABLE part_add_int_string_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table part_add_int_string_permute_select partition(part=1) VALUES (1, 1111, 'new'); + +-- Table-Non-Cascade ADD COLUMNS ... 
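+-- (Rows inserted before this ALTER read back NULL for the new columns c and d; the SELECT permutations below verify that defaulting.)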
+alter table part_add_int_string_permute_select add columns(c int, d string); + +insert into table part_add_int_string_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333, '4444'); + +explain vectorization only detail +select insert_num,part,a,b,c,d from part_add_int_string_permute_select; + +-- SELECT permutation columns to make sure NULL defaulting works right +select insert_num,part,a,b from part_add_int_string_permute_select; +select insert_num,part,a,b,c from part_add_int_string_permute_select; +select insert_num,part,a,b,c,d from part_add_int_string_permute_select; +select insert_num,part,a,c,d from part_add_int_string_permute_select; +select insert_num,part,a,d from part_add_int_string_permute_select; +select insert_num,part,c from part_add_int_string_permute_select; +select insert_num,part,d from part_add_int_string_permute_select; + +drop table part_add_int_string_permute_select; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> DOUBLE +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> DOUBLE: (STRING, CHAR, VARCHAR) +-- +CREATE TABLE part_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING) PARTITIONED BY(part INT) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table part_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING); + +insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double1, double1, double1, 'new' FROM schema_evolution_data WHERE insert_num = 111; + +select insert_num,part,c1,c2,c3,b from part_change_string_group_double; + +drop table part_change_string_group_double; + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for DATE_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for DATE_GROUP -> STRING_GROUP: DATE,TIMESTAMP, (STRING, CHAR, CHAR trunc, VARCHAR, VARCHAR trunc) +-- +CREATE TABLE part_change_date_group_string_group_date_timestamp(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING) PARTITIONED BY(part INT) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table part_change_date_group_string_group_date_timestamp partition(part=1) SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
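+-- (The DATE and TIMESTAMP columns are re-typed to STRING/CHAR/VARCHAR; rows already written to part=1 are converted on read by schema evolution.)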
+alter table part_change_date_group_string_group_date_timestamp replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING); + +insert into table part_change_date_group_string_group_date_timestamp partition(part=1) VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new'); + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp; + +drop table part_change_date_group_string_group_date_timestamp; + + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: +-- (TINYINT, SMALLINT, INT, BIGINT), STRING and +-- (TINYINT, SMALLINT, INT, BIGINT), CHAR and CHAR trunc and +-- (TINYINT, SMALLINT, INT, BIGINT), VARCHAR and VARCHAR trunc +-- +-- +CREATE TABLE part_change_numeric_group_string_group_multi_ints_string_group(insert_num int, + c1 tinyint, c2 smallint, c3 int, c4 bigint, + c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint, + c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint, + b STRING) PARTITIONED BY(part INT); + +insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) SELECT insert_num, + tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
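+-- (The TINYINT/SMALLINT/INT/BIGINT columns are re-typed to STRING, CHAR, and VARCHAR, including the truncating CHAR(5)/VARCHAR(5) variants.)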
+alter table part_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, c4 STRING, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5), + c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5), + b STRING) ; + +insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new'); + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group; + +drop table part_change_numeric_group_string_group_multi_ints_string_group; + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: +-- (DECIMAL, FLOAT, DOUBLE), STRING and +-- (DECIMAL, FLOAT, DOUBLE), CHAR and CHAR trunc and +-- (DECIMAL, FLOAT, DOUBLE), VARCHAR and VARCHAR trunc +-- +-- +CREATE TABLE part_change_numeric_group_string_group_floating_string_group(insert_num int, + c1 decimal(38,18), c2 float, c3 double, + c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double, + c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double, + b STRING) PARTITIONED BY(part INT); + +insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) SELECT insert_num, + decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
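+-- (The DECIMAL/FLOAT/DOUBLE columns are re-typed to STRING, CHAR, and VARCHAR, including the truncating CHAR(7)/VARCHAR(7) variants.)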
+alter table part_change_numeric_group_string_group_floating_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, + c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7), + c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7), + b STRING); + +insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new'); + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group; + +drop table part_change_numeric_group_string_group_floating_string_group; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> STRING_GROUP: STRING, (CHAR, CHAR trunc, VARCHAR, VARCHAR trunc) and +-- CHAR, (VARCHAR, VARCHAR trunc, STRING) and VARCHAR, (CHAR, CHAR trunc, STRING) +-- +CREATE TABLE part_change_string_group_string_group_string(insert_num int, + c1 string, c2 string, c3 string, c4 string, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), + c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING) PARTITIONED BY(part INT) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table part_change_string_group_string_group_string partition(part=1) SELECT insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,c4,b from part_change_string_group_string_group_string; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table part_change_string_group_string_group_string replace columns (insert_num int, + c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9), + c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING, + c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) ; + +insert into table part_change_string_group_string_group_string partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'new'); + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string; + +drop table part_change_string_group_string_group_string; + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP: +-- TINYINT, (SMALLINT, INT, BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- SMALLINT, (INT, BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- INT, (BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- BIGINT, (DECIMAL, FLOAT, DOUBLE) +-- +CREATE TABLE part_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int, + c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, + c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint, + c12 int, c13 int, c14 int, c15 int, + c16 bigint, c17 bigint, c18 bigint, + b STRING) PARTITIONED BY(part INT) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
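+-- (Each column widens to a strictly larger numeric type, so the existing values must convert without loss.)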
+alter table part_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) ; + +insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new'); + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +drop table part_change_lower_to_higher_numeric_group_tinyint_to_bigint; + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP: +-- DECIMAL, (FLOAT, DOUBLE) and +-- FLOAT, (DOUBLE) +-- +CREATE TABLE part_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING) PARTITIONED BY(part INT) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) SELECT insert_num, + decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table part_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) ; + +insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new'); + +select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float; + +drop table part_change_lower_to_higher_numeric_group_decimal_to_float; \ No newline at end of file diff --git ql/src/test/queries/clientpositive/schema_evol_orc_acid_part_update_llap_io.q ql/src/test/queries/clientpositive/schema_evol_orc_acid_part_update_llap_io.q new file mode 100644 index 0000000..1866fc8 --- /dev/null +++ ql/src/test/queries/clientpositive/schema_evol_orc_acid_part_update_llap_io.q @@ -0,0 +1,161 @@ +set hive.explain.user=false; +set hive.fetch.task.conversion=none; +set hive.mapred.mode=nonstrict; +set hive.cli.print.header=true; +set hive.support.concurrency=true; +set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager; +set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat; +SET hive.exec.schema.evolution=false; +SET hive.vectorized.use.vectorized.input.format=true; +SET hive.vectorized.use.vector.serde.deserialize=false; +SET hive.vectorized.use.row.serde.deserialize=false; +SET hive.vectorized.execution.enabled=false; +set hive.exec.dynamic.partition.mode=nonstrict; +set hive.metastore.disallow.incompatible.col.type.changes=true; +set hive.default.fileformat=orc; +set hive.llap.io.enabled=true; +set hive.llap.io.encode.enabled=true; + +-- SORT_QUERY_RESULTS +-- +-- FILE VARIATION: ORC, ACID Non-Vectorized, MapWork, Partitioned +-- *IMPORTANT NOTE* We set hive.exec.schema.evolution=false above since schema evolution is always used for ACID. 
+-- Also, we don't do EXPLAINs on ACID files because the transaction id causes Q file statistics differences... +-- + +CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile; +load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data; + +CREATE TABLE schema_evolution_data_2(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile; +load data local inpath '../../data/files/schema_evolution/schema_evolution_data_2.txt' overwrite into table schema_evolution_data_2; + +-- +-- +-- SECTION VARIATION: ALTER TABLE ADD COLUMNS ... UPDATE New Columns +--- +CREATE TABLE partitioned_update_1(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table partitioned_update_1 partition(part=1) SELECT insert_num, int1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade ADD COLUMNS ... +alter table partitioned_update_1 add columns(c int, d string); + +insert into table partitioned_update_1 partition(part=2) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num <=110; + +insert into table partitioned_update_1 partition(part=1) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110; + +select insert_num,part,a,b,c,d from partitioned_update_1; + +-- UPDATE New Columns +update partitioned_update_1 set c=99; + +select insert_num,part,a,b,c,d from partitioned_update_1; + +alter table partitioned_update_1 partition(part=1) compact 'major'; +alter table partitioned_update_1 partition(part=2) compact 'major'; + +select insert_num,part,a,b,c,d from partitioned_update_1; + +DROP TABLE partitioned_update_1; + +-- +-- +-- SECTION VARIATION: ALTER TABLE ADD COLUMNS ... DELETE where old column +--- +CREATE TABLE partitioned_delete_1(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table partitioned_delete_1 partition(part=1) SELECT insert_num, int1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade ADD COLUMNS ... 
+alter table partitioned_delete_1 add columns(c int, d string); + +insert into table partitioned_delete_1 partition(part=2) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num <=110; + +insert into table partitioned_delete_1 partition(part=1) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110; + +select part,a,b,c,d from partitioned_delete_1; + +-- DELETE where old column +delete from partitioned_delete_1 where insert_num = 102 or insert_num = 104 or insert_num = 106; + +select insert_num,part,a,b,c,d from partitioned_delete_1; + +alter table partitioned_delete_1 partition(part=1) compact 'major'; +alter table partitioned_delete_1 partition(part=2) compact 'major'; + +select insert_num,part,a,b,c,d from partitioned_delete_1; + +DROP TABLE partitioned_delete_1; + +-- +-- +-- SECTION VARIATION: ALTER TABLE ADD COLUMNS ... DELETE where new column +--- +CREATE TABLE partitioned_delete_2(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table partitioned_delete_2 partition(part=1) SELECT insert_num, int1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade ADD COLUMNS ... +alter table partitioned_delete_2 add columns(c int, d string); + +insert into table partitioned_delete_2 partition(part=2) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num <=110; + +insert into table partitioned_delete_2 partition(part=1) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110; + +select insert_num,part,a,b,c,d from partitioned_delete_2; + +-- DELETE where new column +delete from partitioned_delete_2 where insert_num = 108 or insert_num > 113; + +select insert_num,part,a,b,c,d from partitioned_delete_2; + +alter table partitioned_delete_2 partition(part=1) compact 'major'; +alter table partitioned_delete_2 partition(part=2) compact 'major'; + +select insert_num,part,a,b,c,d from partitioned_delete_2; + +DROP TABLE partitioned_delete_2; + +-- The following tests were moved from system tests +drop table if exists missing_ddl_2; +create table missing_ddl_2(name string, age int); +insert overwrite table missing_ddl_2 select value, key from srcbucket; +alter table missing_ddl_2 add columns (gps double); + +set hive.exec.dynamic.partition.mode=nonstrict; +set hive.optimize.sort.dynamic.partition=true; + +DROP TABLE IF EXISTS all100kjson_textfile_orc; +CREATE TABLE all100kjson_textfile_orc ( + si smallint, + i int, + b bigint, + f float, + d double, + s string, + bo boolean, + ts timestamp) + PARTITIONED BY (t tinyint) + ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe' + WITH SERDEPROPERTIES ('timestamp.formats'='yyyy-MM-dd\'T\'HH:mm:ss') + STORED AS TEXTFILE; + +INSERT INTO TABLE all100kjson_textfile_orc PARTITION (t) SELECT csmallint, cint, cbigint, cfloat, cdouble, cstring1, cboolean1, ctimestamp1, ctinyint FROM alltypesorc WHERE ctinyint > 0; + +ALTER TABLE all100kjson_textfile_orc + SET FILEFORMAT + INPUTFORMAT 'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat' + OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat' + SERDE 'org.apache.hadoop.hive.ql.io.orc.OrcSerde'; + +INSERT INTO TABLE all100kjson_textfile_orc PARTITION (t) SELECT csmallint, cint, cbigint, cfloat, cdouble, cstring1, cboolean1, ctimestamp1, ctinyint FROM alltypesorc WHERE ctinyint < 1 and ctinyint > -50 ; + +-- HIVE-11977: Hive should handle an
external avro table with zero length files present +DROP TABLE IF EXISTS emptyavro; +CREATE TABLE emptyavro (i int) + PARTITIONED BY (s string) + STORED AS AVRO; +load data local inpath '../../data/files/empty1.txt' into table emptyavro PARTITION (s='something'); +SELECT COUNT(*) from emptyavro; \ No newline at end of file diff --git ql/src/test/queries/clientpositive/schema_evol_orc_acid_table_llap_io.q ql/src/test/queries/clientpositive/schema_evol_orc_acid_table_llap_io.q new file mode 100644 index 0000000..66e6da4 --- /dev/null +++ ql/src/test/queries/clientpositive/schema_evol_orc_acid_table_llap_io.q @@ -0,0 +1,318 @@ +set hive.explain.user=false; +set hive.fetch.task.conversion=none; +set hive.cli.print.header=true; +set hive.support.concurrency=true; +set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager; +set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat; +SET hive.exec.schema.evolution=false; +SET hive.vectorized.use.vectorized.input.format=true; +SET hive.vectorized.use.vector.serde.deserialize=false; +SET hive.vectorized.use.row.serde.deserialize=false; +SET hive.vectorized.execution.enabled=false; +set hive.exec.dynamic.partition.mode=nonstrict; +set hive.metastore.disallow.incompatible.col.type.changes=true; +set hive.default.fileformat=orc; +set hive.llap.io.enabled=true; +set hive.llap.io.encode.enabled=true; + +-- SORT_QUERY_RESULTS +-- +-- FILE VARIATION: ORC, ACID Non-Vectorized, MapWork, Table +-- *IMPORTANT NOTE* We set hive.exec.schema.evolution=false above since schema evolution is always used for ACID. +-- Also, we don't do regular EXPLAINs on ACID files because the transaction id causes Q file statistics differences... +-- Instead just one explain vectorization only detail +-- + +CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile; +load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data; + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE ADD COLUMNS +-- +-- +-- SUBSECTION: ALTER TABLE ADD COLUMNS: INT PERMUTE SELECT +-- +-- +CREATE TABLE table_add_int_permute_select(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table table_add_int_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade ADD COLUMNS ... 
+alter table table_add_int_permute_select add columns(c int); + +insert into table table_add_int_permute_select VALUES (111, 80000, 'new', 80000); + +explain vectorization only detail +select insert_num,a,b,c from table_add_int_permute_select; + +-- SELECT permutation columns to make sure NULL defaulting works right +select insert_num,a,b from table_add_int_permute_select; +select insert_num,a,b,c from table_add_int_permute_select; +select insert_num,c from table_add_int_permute_select; + +drop table table_add_int_permute_select; + + +-- SUBSECTION: ALTER TABLE ADD COLUMNS: INT, STRING, PERMUTE SELECT +-- +-- +CREATE TABLE table_add_int_string_permute_select(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table table_add_int_string_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade ADD COLUMNS ... +alter table table_add_int_string_permute_select add columns(c int, d string); + +insert into table table_add_int_string_permute_select VALUES (111, 80000, 'new', 80000, 'filler'); + +-- SELECT permutation columns to make sure NULL defaulting works right +select insert_num,a,b from table_add_int_string_permute_select; +select insert_num,a,b,c from table_add_int_string_permute_select; +select insert_num,a,b,c,d from table_add_int_string_permute_select; +select insert_num,a,c,d from table_add_int_string_permute_select; +select insert_num,a,d from table_add_int_string_permute_select; +select insert_num,c from table_add_int_string_permute_select; +select insert_num,d from table_add_int_string_permute_select; + +drop table table_add_int_string_permute_select; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> DOUBLE +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> DOUBLE: (STRING, CHAR, VARCHAR) +-- +CREATE TABLE table_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table table_change_string_group_double SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
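+-- (c1..c3 hold numeric text selected from double_str, so re-typing them to DOUBLE lets schema evolution parse the existing rows.)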
+alter table table_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING); + +insert into table table_change_string_group_double VALUES (111, 789.321, 789.321, 789.321, 'new'); + +select insert_num,c1,c2,c3,b from table_change_string_group_double; + +drop table table_change_string_group_double; + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for DATE_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for DATE_GROUP -> STRING_GROUP: DATE,TIMESTAMP, (STRING, CHAR, CHAR trunc, VARCHAR, VARCHAR trunc) +-- +CREATE TABLE table_change_date_group_string_group_date_group(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table table_change_date_group_string_group_date_group SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table table_change_date_group_string_group_date_group replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING); + +insert into table table_change_date_group_string_group_date_group VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new'); + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_date_group_string_group_date_group; + +drop table table_change_date_group_string_group_date_group; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: +-- (TINYINT, SMALLINT, INT, BIGINT), STRING and +-- (TINYINT, SMALLINT, INT, BIGINT), CHAR and CHAR trunc and +-- (TINYINT, SMALLINT, INT, BIGINT), VARCHAR and VARCHAR trunc +-- +-- +CREATE TABLE table_change_numeric_group_string_group_multi_ints_string_group(insert_num int, + c1 tinyint, c2 smallint, c3 int, c4 bigint, + c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint, + c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint, + b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table table_change_numeric_group_string_group_multi_ints_string_group SELECT insert_num, + tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + 'original' FROM schema_evolution_data; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table table_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, c4 STRING, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5), + c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5), + b STRING) ; + +insert into table table_change_numeric_group_string_group_multi_ints_string_group VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new'); + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group; + +drop table table_change_numeric_group_string_group_multi_ints_string_group; + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: +-- (DECIMAL, FLOAT, DOUBLE), STRING and +-- (DECIMAL, FLOAT, DOUBLE), CHAR and CHAR trunc and +-- (DECIMAL, FLOAT, DOUBLE), VARCHAR and VARCHAR trunc +-- +-- +CREATE TABLE table_change_numeric_group_string_group_floating_string_group(insert_num int, + c1 decimal(38,18), c2 float, c3 double, + c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double, + c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double, + b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table table_change_numeric_group_string_group_floating_string_group SELECT insert_num, + decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + 'original' FROM schema_evolution_data; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
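+-- NOTE: same pattern for the floating-point types, with CHAR(7)/VARCHAR(7) exercising truncation
+-- of the rendered value. An illustrative spot-check, not part of the checked-in test output:
+--   select cast(cast(789.321 as double) as char(7));   -- '789.321' (fits)
+--   select cast('1234.5678' as char(7));                -- '1234.56' (truncated)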
+alter table table_change_numeric_group_string_group_floating_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, + c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7), + c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7), + b STRING); + +insert into table table_change_numeric_group_string_group_floating_string_group VALUES (111, + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new'); + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group; + +drop table table_change_numeric_group_string_group_floating_string_group; + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> STRING_GROUP: STRING, (CHAR, CHAR trunc, VARCHAR, VARCHAR trunc) and +-- CHAR, (VARCHAR, VARCHAR trunc, STRING) and VARCHAR, (CHAR, CHAR trunc, STRING) +-- +CREATE TABLE table_change_string_group_string_group_string(insert_num int, + c1 string, c2 string, c3 string, c4 string, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), + c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table table_change_string_group_string_group_string SELECT insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data; + +select insert_num,c1,c2,c3,c4,b from table_change_string_group_string_group_string; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
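+-- NOTE: conversions inside the string group lose nothing except length: the CHAR(9)/VARCHAR(9)
+-- targets truncate longer values, and CHAR pads with trailing blanks that comparisons ignore.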
+alter table table_change_string_group_string_group_string replace columns (insert_num int, + c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9), + c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING, + c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) ; + +insert into table table_change_string_group_string_group_string VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'new'); + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_string_group_string_group_string; + +drop table table_change_string_group_string_group_string; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP: +-- TINYINT, (SMALLINT, INT, BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- SMALLINT, (INT, BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- INT, (BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- BIGINT, (DECIMAL, FLOAT, DOUBLE) +-- +CREATE TABLE table_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int, + c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, + c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint, + c12 int, c13 int, c14 int, c15 int, + c16 bigint, c17 bigint, c18 bigint, + b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM schema_evolution_data; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
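+-- NOTE: every change below is a widening ("lower" to "higher") numeric conversion, so the
+-- pre-ALTER rows should read back with their original values unchanged under the wider type.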
+alter table table_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) ; + +insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new'); + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +drop table table_change_lower_to_higher_numeric_group_tinyint_to_bigint; + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP: +-- DECIMAL, (FLOAT, DOUBLE) and +-- FLOAT, (DOUBLE) +-- +CREATE TABLE table_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table table_change_lower_to_higher_numeric_group_decimal_to_float SELECT insert_num, + decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data; + +select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table table_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) ; + +insert into table table_change_lower_to_higher_numeric_group_decimal_to_float VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new'); + +select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float; + +drop table table_change_lower_to_higher_numeric_group_decimal_to_float; diff --git ql/src/test/queries/clientpositive/schema_evol_orc_acid_table_update_llap_io.q ql/src/test/queries/clientpositive/schema_evol_orc_acid_table_update_llap_io.q new file mode 100644 index 0000000..16df15a --- /dev/null +++ ql/src/test/queries/clientpositive/schema_evol_orc_acid_table_update_llap_io.q @@ -0,0 +1,113 @@ +set hive.explain.user=false; +set hive.fetch.task.conversion=none; +set hive.cli.print.header=true; +set hive.support.concurrency=true; +set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager; +set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat; +SET hive.exec.schema.evolution=false; +SET hive.vectorized.use.vectorized.input.format=true; +SET hive.vectorized.use.vector.serde.deserialize=false; +SET hive.vectorized.use.row.serde.deserialize=false; +SET hive.vectorized.execution.enabled=false; +set hive.exec.dynamic.partition.mode=nonstrict; +set hive.metastore.disallow.incompatible.col.type.changes=true; +set hive.default.fileformat=orc; +set hive.llap.io.enabled=true; +set hive.llap.io.encode.enabled=true; + +-- SORT_QUERY_RESULTS +-- +-- FILE VARIATION: ORC, ACID Non-Vectorized, MapWork, Table +-- *IMPORTANT NOTE* We set hive.exec.schema.evolution=false above since schema evolution is always used for ACID. +-- Also, we don't do EXPLAINs on ACID files because the transaction id causes Q file statistics differences... 
+-- + +CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile; +load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data; + +CREATE TABLE schema_evolution_data_2(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile; +load data local inpath '../../data/files/schema_evolution/schema_evolution_data_2.txt' overwrite into table schema_evolution_data_2; + +-- +-- +-- SECTION VARIATION: ALTER TABLE ADD COLUMNS ... UPDATE New Columns +--- +CREATE TABLE table5(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table table5 SELECT insert_num, int1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade ADD COLUMNS ... +alter table table5 add columns(c int, d string); + +insert into table table5 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2; + +select a,b,c,d from table5; + +-- UPDATE New Columns +update table5 set c=99; + +select a,b,c,d from table5; + +alter table table5 compact 'major'; + +select a,b,c,d from table5; + +DROP TABLE table5; + +-- +-- +-- SECTION VARIATION: ALTER TABLE ADD COLUMNS ... DELETE where old column +--- +CREATE TABLE table6(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table table6 SELECT insert_num, int1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade ADD COLUMNS ... +alter table table6 add columns(c int, d string); + +insert into table table6 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num <= 110; + +insert into table table6 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110; + +select a,b,c,d from table6; + +-- DELETE where old column +delete from table6 where insert_num = 102 or insert_num = 104 or insert_num = 106; + +select a,b,c,d from table6; + +alter table table6 compact 'major'; + +select a,b,c,d from table6; + +DROP TABLE table6; + +-- +-- +-- SECTION VARIATION: ALTER TABLE ADD COLUMNS ... DELETE where new column +--- +CREATE TABLE table7(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table table7 SELECT insert_num, int1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade ADD COLUMNS ... 
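+-- NOTE: ACID deletes identify rows by ROW__ID, so rows written under either schema version can
+-- be deleted; the major compaction afterwards rewrites the surviving rows into a new base file
+-- under the latest schema.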
+alter table table7 add columns(c int, d string); + +insert into table table7 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num <= 110; + +insert into table table7 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110; + +select a,b,c,d from table7; + +-- DELETE where new column +delete from table7 where insert_num = 107 or insert_num >= 110; + +select a,b,c,d from table7; + +alter table table7 compact 'major'; + +select a,b,c,d from table7; + +DROP TABLE table7; diff --git ql/src/test/queries/clientpositive/schema_evol_orc_acidvec_part_llap_io.q ql/src/test/queries/clientpositive/schema_evol_orc_acidvec_part_llap_io.q new file mode 100644 index 0000000..e676346 --- /dev/null +++ ql/src/test/queries/clientpositive/schema_evol_orc_acidvec_part_llap_io.q @@ -0,0 +1,359 @@ +set hive.explain.user=false; +set hive.fetch.task.conversion=none; +set hive.mapred.mode=nonstrict; +set hive.cli.print.header=true; +set hive.support.concurrency=true; +set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager; +set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat; +SET hive.exec.schema.evolution=false; +SET hive.vectorized.use.vectorized.input.format=true; +SET hive.vectorized.use.vector.serde.deserialize=false; +SET hive.vectorized.use.row.serde.deserialize=false; +SET hive.vectorized.execution.enabled=true; +set hive.exec.dynamic.partition.mode=nonstrict; +set hive.metastore.disallow.incompatible.col.type.changes=true; +set hive.default.fileformat=orc; +set hive.llap.io.enabled=true; +set hive.llap.io.encode.enabled=true; + +-- SORT_QUERY_RESULTS +-- +-- FILE VARIATION: ORC, ACID Vectorized, MapWork, Partitioned +-- *IMPORTANT NOTE* We set hive.exec.schema.evolution=false above since schema evolution is always used for ACID. +-- Also, we don't do regular EXPLAINs on ACID files because the transaction id causes Q file statistics differences... +-- Instead explain vectorization only detail +-- + +CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile; +load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data; + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE ADD COLUMNS +-- +-- +-- SUBSECTION: ALTER TABLE ADD COLUMNS: INT PERMUTE SELECT +-- +-- +CREATE TABLE part_add_int_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table part_add_int_permute_select partition(part=1) VALUES (1, 1111, 'new'); + +-- Table-Non-Cascade ADD COLUMNS ... 
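+-- NOTE: without CASCADE the ALTER below changes only the table-level schema; partition part=1
+-- keeps its original column list, and the reader reconciles the two, returning NULL for the
+-- added column c in rows written before the ALTER.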
+alter table part_add_int_permute_select add columns(c int); + +insert into table part_add_int_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333); + +explain vectorization only detail +select insert_num,part,a,b,c from part_add_int_permute_select; + +-- SELECT permutation columns to make sure NULL defaulting works right +select insert_num,part,a,b from part_add_int_permute_select; +select insert_num,part,a,b,c from part_add_int_permute_select; +select insert_num,part,c from part_add_int_permute_select; + +drop table part_add_int_permute_select; + + +-- SUBSECTION: ALTER TABLE ADD COLUMNS: INT, STRING, PERMUTE SELECT +-- +-- +CREATE TABLE part_add_int_string_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table part_add_int_string_permute_select partition(part=1) VALUES (1, 1111, 'new'); + +-- Table-Non-Cascade ADD COLUMNS ... +alter table part_add_int_string_permute_select add columns(c int, d string); + +insert into table part_add_int_string_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333, '4444'); + +explain vectorization only detail +select insert_num,part,a,b,c,d from part_add_int_string_permute_select; + +-- SELECT permutation columns to make sure NULL defaulting works right +select insert_num,part,a,b from part_add_int_string_permute_select; +select insert_num,part,a,b,c from part_add_int_string_permute_select; +select insert_num,part,a,b,c,d from part_add_int_string_permute_select; +select insert_num,part,a,c,d from part_add_int_string_permute_select; +select insert_num,part,a,d from part_add_int_string_permute_select; +select insert_num,part,c from part_add_int_string_permute_select; +select insert_num,part,d from part_add_int_string_permute_select; + +drop table part_add_int_string_permute_select; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> DOUBLE +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> DOUBLE: (STRING, CHAR, VARCHAR) +-- +CREATE TABLE part_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING) PARTITIONED BY(part INT) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
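+-- NOTE: this file runs the same conversions through the vectorized ACID reader with LLAP IO
+-- enabled; the EXPLAIN VECTORIZATION DETAIL output below should reflect the evolved
+-- (post-REPLACE) reader schema.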
+alter table part_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING); + +insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double1, double1, double1, 'new' FROM schema_evolution_data WHERE insert_num = 111; + +explain vectorization only detail +select insert_num,part,c1,c2,c3,b from part_change_string_group_double; + +select insert_num,part,c1,c2,c3,b from part_change_string_group_double; + +drop table part_change_string_group_double; + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for DATE_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for DATE_GROUP -> STRING_GROUP: DATE,TIMESTAMP, (STRING, CHAR, CHAR trunc, VARCHAR, VARCHAR trunc) +-- +CREATE TABLE part_change_date_group_string_group_date_timestamp(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING) PARTITIONED BY(part INT) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table part_change_date_group_string_group_date_timestamp partition(part=1) SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table part_change_date_group_string_group_date_timestamp replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING); + +insert into table part_change_date_group_string_group_date_timestamp partition(part=1) VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new'); + +explain vectorization only detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp; + +drop table part_change_date_group_string_group_date_timestamp; + + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: +-- (TINYINT, SMALLINT, INT, BIGINT), STRING and +-- (TINYINT, SMALLINT, INT, BIGINT), CHAR and CHAR trunc and +-- (TINYINT, SMALLINT, INT, BIGINT), VARCHAR and VARCHAR trunc +-- +-- +CREATE TABLE part_change_numeric_group_string_group_multi_ints_string_group(insert_num int, + c1 tinyint, c2 smallint, c3 int, c4 bigint, + c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint, + c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint, + b STRING) PARTITIONED BY(part INT); + +insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) SELECT insert_num, + tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + 'original' FROM schema_evolution_data; + +explain vectorization only detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from 
part_change_numeric_group_string_group_multi_ints_string_group; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table part_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, c4 STRING, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5), + c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5), + b STRING) ; + +insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new'); + +explain vectorization only detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group; + +drop table part_change_numeric_group_string_group_multi_ints_string_group; + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: +-- (DECIMAL, FLOAT, DOUBLE), STRING and +-- (DECIMAL, FLOAT, DOUBLE), CHAR and CHAR trunc and +-- (DECIMAL, FLOAT, DOUBLE), VARCHAR and VARCHAR trunc +-- +-- +CREATE TABLE part_change_numeric_group_string_group_floating_string_group(insert_num int, + c1 decimal(38,18), c2 float, c3 double, + c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double, + c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double, + b STRING) PARTITIONED BY(part INT); + +insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) SELECT insert_num, + decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + 'original' FROM schema_evolution_data; + +explain vectorization only detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table part_change_numeric_group_string_group_floating_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, + c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7), + c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7), + b STRING); + +insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new'); + +explain vectorization only detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group; + +drop table part_change_numeric_group_string_group_floating_string_group; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> STRING_GROUP: STRING, (CHAR, CHAR trunc, VARCHAR, VARCHAR trunc) and +-- CHAR, (VARCHAR, VARCHAR trunc, STRING) and VARCHAR, (CHAR, CHAR trunc, STRING) +-- +CREATE TABLE part_change_string_group_string_group_string(insert_num int, + c1 string, c2 string, c3 string, c4 string, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), + c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING) PARTITIONED BY(part INT) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table part_change_string_group_string_group_string partition(part=1) SELECT insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data; + +explain vectorization only detail +select insert_num,part,c1,c2,c3,c4,b from part_change_string_group_string_group_string; + +select insert_num,part,c1,c2,c3,c4,b from part_change_string_group_string_group_string; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table part_change_string_group_string_group_string replace columns (insert_num int, + c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9), + c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING, + c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) ; + +insert into table part_change_string_group_string_group_string partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'new'); + +explain vectorization only detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string; + +drop table part_change_string_group_string_group_string; + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP: +-- TINYINT, (SMALLINT, INT, BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- SMALLINT, (INT, BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- INT, (BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- BIGINT, (DECIMAL, FLOAT, DOUBLE) +-- +CREATE TABLE part_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int, + c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, + c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint, + c12 int, c13 int, c14 int, c15 int, + c16 bigint, c17 bigint, c18 bigint, + b STRING) PARTITIONED BY(part INT) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM schema_evolution_data; + +explain vectorization only detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table part_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) ; + +insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new'); + +explain vectorization only detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +drop table part_change_lower_to_higher_numeric_group_tinyint_to_bigint; + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP: +-- DECIMAL, (FLOAT, DOUBLE) and +-- FLOAT, (DOUBLE) +-- +CREATE TABLE part_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING) PARTITIONED BY(part INT) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) SELECT insert_num, + decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data; + +explain vectorization only detail +select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float; + +select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
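+-- NOTE: decimal(38,18) to FLOAT/DOUBLE still counts as a "lower to higher" conversion, but
+-- unlike the integer widenings above it can lose precision: each decimal value is rounded to
+-- the nearest representable floating-point value on read.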
+alter table part_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) ;
+
+insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new');
+
+explain vectorization only detail
+select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float;
+
+select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float;
+
+drop table part_change_lower_to_higher_numeric_group_decimal_to_float;
\ No newline at end of file
diff --git ql/src/test/queries/clientpositive/schema_evol_orc_acidvec_part_update_llap_io.q ql/src/test/queries/clientpositive/schema_evol_orc_acidvec_part_update_llap_io.q
new file mode 100644
index 0000000..2b404ba
--- /dev/null
+++ ql/src/test/queries/clientpositive/schema_evol_orc_acidvec_part_update_llap_io.q
@@ -0,0 +1,119 @@
+set hive.explain.user=false;
+set hive.fetch.task.conversion=none;
+set hive.mapred.mode=nonstrict;
+set hive.cli.print.header=true;
+set hive.support.concurrency=true;
+set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
+set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
+SET hive.exec.schema.evolution=false;
+SET hive.vectorized.use.vectorized.input.format=true;
+SET hive.vectorized.use.vector.serde.deserialize=false;
+SET hive.vectorized.use.row.serde.deserialize=false;
+SET hive.vectorized.execution.enabled=true;
+set hive.exec.dynamic.partition.mode=nonstrict;
+set hive.metastore.disallow.incompatible.col.type.changes=true;
+set hive.default.fileformat=orc;
+set hive.llap.io.enabled=true;
+set hive.llap.io.encode.enabled=true;
+
+-- SORT_QUERY_RESULTS
+--
+-- FILE VARIATION: ORC, ACID Vectorized, MapWork, Partitioned
+-- *IMPORTANT NOTE* We set hive.exec.schema.evolution=false above since schema evolution is always used for ACID.
+-- Also, we don't do EXPLAINs on ACID files because the transaction id causes Q file statistics differences...
+--
+
+CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string)
+row format delimited fields terminated by '|' stored as textfile;
+load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data;
+
+CREATE TABLE schema_evolution_data_2(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string)
+row format delimited fields terminated by '|' stored as textfile;
+load data local inpath '../../data/files/schema_evolution/schema_evolution_data_2.txt' overwrite into table schema_evolution_data_2;
+
+--
+--
+-- SECTION VARIATION: ALTER TABLE ADD COLUMNS ... UPDATE New Columns
+---
+CREATE TABLE partitioned_update_1(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true');
+
+insert into table partitioned_update_1 partition(part=1) SELECT insert_num, int1, 'original' FROM schema_evolution_data;
+
+-- Table-Non-Cascade ADD COLUMNS ...
+alter table partitioned_update_1 add columns(c int, d string);
+
+insert into table partitioned_update_1 partition(part=2) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num <=110;
+
+insert into table partitioned_update_1 partition(part=1) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110;
+
+select insert_num,part,a,b,c,d from partitioned_update_1;
+
+-- UPDATE New Columns
+update partitioned_update_1 set c=99;
+
+select insert_num,part,a,b,c,d from partitioned_update_1;
+
+alter table partitioned_update_1 partition(part=1) compact 'major';
+alter table partitioned_update_1 partition(part=2) compact 'major';
+
+select insert_num,part,a,b,c,d from partitioned_update_1;
+
+DROP TABLE partitioned_update_1;
+
+--
+--
+-- SECTION VARIATION: ALTER TABLE ADD COLUMNS ... DELETE where old column
+---
+CREATE TABLE partitioned_delete_1(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true');
+
+insert into table partitioned_delete_1 partition(part=1) SELECT insert_num, int1, 'original' FROM schema_evolution_data;
+
+-- Table-Non-Cascade ADD COLUMNS ...
+alter table partitioned_delete_1 add columns(c int, d string);
+
+insert into table partitioned_delete_1 partition(part=2) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num <=110;
+
+insert into table partitioned_delete_1 partition(part=1) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110;
+
+select part,a,b,c,d from partitioned_delete_1;
+
+-- DELETE where old column
+delete from partitioned_delete_1 where insert_num = 102 or insert_num = 104 or insert_num = 106;
+
+select insert_num,part,a,b,c,d from partitioned_delete_1;
+
+alter table partitioned_delete_1 partition(part=1) compact 'major';
+alter table partitioned_delete_1 partition(part=2) compact 'major';
+
+select insert_num,part,a,b,c,d from partitioned_delete_1;
+
+DROP TABLE partitioned_delete_1;
+
+--
+--
+-- SECTION VARIATION: ALTER TABLE ADD COLUMNS ... DELETE where new column
+---
+CREATE TABLE partitioned_delete_2(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true');
+
+insert into table partitioned_delete_2 partition(part=1) SELECT insert_num, int1, 'original' FROM schema_evolution_data;
+
+-- Table-Non-Cascade ADD COLUMNS ...
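+-- NOTE: as in the sections above, ACID compaction runs partition by partition, hence the
+-- separate COMPACT 'major' statements below for part=1 and part=2.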
+alter table partitioned_delete_2 add columns(c int, d string); + +insert into table partitioned_delete_2 partition(part=2) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num <=110; + +insert into table partitioned_delete_2 partition(part=1) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110; + +select insert_num,part,a,b,c,d from partitioned_delete_2; + +-- DELETE where new column +delete from partitioned_delete_2 where insert_num = 108 or insert_num > 113; + +select insert_num,part,a,b,c,d from partitioned_delete_2; + +alter table partitioned_delete_2 partition(part=1) compact 'major'; +alter table partitioned_delete_2 partition(part=2) compact 'major'; + +select insert_num,part,a,b,c,d from partitioned_delete_2; + +DROP TABLE partitioned_delete_2; diff --git ql/src/test/queries/clientpositive/schema_evol_orc_acidvec_table_llap_io.q ql/src/test/queries/clientpositive/schema_evol_orc_acidvec_table_llap_io.q new file mode 100644 index 0000000..b599ed5 --- /dev/null +++ ql/src/test/queries/clientpositive/schema_evol_orc_acidvec_table_llap_io.q @@ -0,0 +1,357 @@ +set hive.explain.user=false; +set hive.fetch.task.conversion=none; +set hive.cli.print.header=true; +set hive.support.concurrency=true; +set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager; +set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat; +SET hive.exec.schema.evolution=false; +SET hive.vectorized.use.vectorized.input.format=true; +SET hive.vectorized.use.vector.serde.deserialize=false; +SET hive.vectorized.use.row.serde.deserialize=false; +SET hive.vectorized.execution.enabled=true; +set hive.exec.dynamic.partition.mode=nonstrict; +set hive.metastore.disallow.incompatible.col.type.changes=true; +set hive.default.fileformat=orc; +set hive.llap.io.enabled=true; +set hive.llap.io.encode.enabled=true; + +-- SORT_QUERY_RESULTS +-- +-- FILE VARIATION: ORC, ACID Vectorized, MapWork, Table +-- *IMPORTANT NOTE* We set hive.exec.schema.evolution=false above since schema evolution is always used for ACID. +-- Also, we don't do regular EXPLAINs on ACID files because the transaction id causes Q file statistics differences... +-- Instead explain vectorization only detail +-- + +CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile; +load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data; + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE ADD COLUMNS +-- +-- +-- SUBSECTION: ALTER TABLE ADD COLUMNS: INT PERMUTE SELECT +-- +-- +CREATE TABLE table_add_int_permute_select(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table table_add_int_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade ADD COLUMNS ... 
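+-- NOTE: rows inserted before the ADD COLUMNS below carry no value for c, so the SELECTs that
+-- project c should show NULL for the 'original' rows and 80000 only for the row inserted after
+-- the ALTER.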
+alter table table_add_int_permute_select add columns(c int); + +insert into table table_add_int_permute_select VALUES (111, 80000, 'new', 80000); + +explain vectorization only detail +select insert_num,a,b,c from table_add_int_permute_select; + +-- SELECT permutation columns to make sure NULL defaulting works right +select insert_num,a,b from table_add_int_permute_select; +select insert_num,a,b,c from table_add_int_permute_select; +select insert_num,c from table_add_int_permute_select; + +drop table table_add_int_permute_select; + + +-- SUBSECTION: ALTER TABLE ADD COLUMNS: INT, STRING, PERMUTE SELECT +-- +-- +CREATE TABLE table_add_int_string_permute_select(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table table_add_int_string_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade ADD COLUMNS ... +alter table table_add_int_string_permute_select add columns(c int, d string); + +insert into table table_add_int_string_permute_select VALUES (111, 80000, 'new', 80000, 'filler'); + +explain vectorization only detail +select insert_num,a,b,c,d from table_add_int_string_permute_select; + +-- SELECT permutation columns to make sure NULL defaulting works right +select insert_num,a,b from table_add_int_string_permute_select; +select insert_num,a,b,c from table_add_int_string_permute_select; +select insert_num,a,b,c,d from table_add_int_string_permute_select; +select insert_num,a,c,d from table_add_int_string_permute_select; +select insert_num,a,d from table_add_int_string_permute_select; +select insert_num,c from table_add_int_string_permute_select; +select insert_num,d from table_add_int_string_permute_select; + +drop table table_add_int_string_permute_select; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> DOUBLE +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> DOUBLE: (STRING, CHAR, VARCHAR) +-- +CREATE TABLE table_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table table_change_string_group_double SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
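+-- NOTE: the EXPLAIN VECTORIZATION ONLY DETAIL statements in this file restrict the output to
+-- the vectorization description, which keeps transaction-dependent statistics out of the golden
+-- files while still verifying that the scan is vectorized.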
+alter table table_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING); + +insert into table table_change_string_group_double VALUES (111, 789.321, 789.321, 789.321, 'new'); + +explain vectorization only detail +select insert_num,c1,c2,c3,b from table_change_string_group_double; + +select insert_num,c1,c2,c3,b from table_change_string_group_double; + +drop table table_change_string_group_double; + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for DATE_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for DATE_GROUP -> STRING_GROUP: DATE,TIMESTAMP, (STRING, CHAR, CHAR trunc, VARCHAR, VARCHAR trunc) +-- +CREATE TABLE table_change_date_group_string_group_date_group(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table table_change_date_group_string_group_date_group SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table table_change_date_group_string_group_date_group replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING); + +insert into table table_change_date_group_string_group_date_group VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new'); + +explain vectorization only detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_date_group_string_group_date_group; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_date_group_string_group_date_group; + +drop table table_change_date_group_string_group_date_group; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: +-- (TINYINT, SMALLINT, INT, BIGINT), STRING and +-- (TINYINT, SMALLINT, INT, BIGINT), CHAR and CHAR trunc and +-- (TINYINT, SMALLINT, INT, BIGINT), VARCHAR and VARCHAR trunc +-- +-- +CREATE TABLE table_change_numeric_group_string_group_multi_ints_string_group(insert_num int, + c1 tinyint, c2 smallint, c3 int, c4 bigint, + c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint, + c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint, + b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table table_change_numeric_group_string_group_multi_ints_string_group SELECT insert_num, + tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + 'original' FROM schema_evolution_data; + +explain vectorization only detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group; + +select 
insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table table_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, c4 STRING, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5), + c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5), + b STRING) ; + +insert into table table_change_numeric_group_string_group_multi_ints_string_group VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new'); + +explain vectorization only detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group; + +drop table table_change_numeric_group_string_group_multi_ints_string_group; + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: +-- (DECIMAL, FLOAT, DOUBLE), STRING and +-- (DECIMAL, FLOAT, DOUBLE), CHAR and CHAR trunc and +-- (DECIMAL, FLOAT, DOUBLE), VARCHAR and VARCHAR trunc +-- +-- +CREATE TABLE table_change_numeric_group_string_group_floating_string_group(insert_num int, + c1 decimal(38,18), c2 float, c3 double, + c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double, + c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double, + b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table table_change_numeric_group_string_group_floating_string_group SELECT insert_num, + decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + 'original' FROM schema_evolution_data; + +explain vectorization only detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table table_change_numeric_group_string_group_floating_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, + c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7), + c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7), + b STRING); + +insert into table table_change_numeric_group_string_group_floating_string_group VALUES (111, + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new'); + +explain vectorization only detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group; + +drop table table_change_numeric_group_string_group_floating_string_group; + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> STRING_GROUP: STRING, (CHAR, CHAR trunc, VARCHAR, VARCHAR trunc) and +-- CHAR, (VARCHAR, VARCHAR trunc, STRING) and VARCHAR, (CHAR, CHAR trunc, STRING) +-- +CREATE TABLE table_change_string_group_string_group_string(insert_num int, + c1 string, c2 string, c3 string, c4 string, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), + c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table table_change_string_group_string_group_string SELECT insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data; + +explain vectorization only detail +select insert_num,c1,c2,c3,c4,b from table_change_string_group_string_group_string; + +select insert_num,c1,c2,c3,c4,b from table_change_string_group_string_group_string; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table table_change_string_group_string_group_string replace columns (insert_num int, + c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9), + c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING, + c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) ; + +insert into table table_change_string_group_string_group_string VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'new'); + +explain vectorization only detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_string_group_string_group_string; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_string_group_string_group_string; + +drop table table_change_string_group_string_group_string; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP: +-- TINYINT, (SMALLINT, INT, BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- SMALLINT, (INT, BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- INT, (BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- BIGINT, (DECIMAL, FLOAT, DOUBLE) +-- +CREATE TABLE table_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int, + c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, + c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint, + c12 int, c13 int, c14 int, c15 int, + c16 bigint, c17 bigint, c18 bigint, + b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM schema_evolution_data; + +explain vectorization only detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table table_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) ; + +insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new'); + +explain vectorization only detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +drop table table_change_lower_to_higher_numeric_group_tinyint_to_bigint; + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP: +-- DECIMAL, (FLOAT, DOUBLE) and +-- FLOAT, (DOUBLE) +-- +CREATE TABLE table_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table table_change_lower_to_higher_numeric_group_decimal_to_float SELECT insert_num, + decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data; + +explain vectorization only detail +select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float; + +select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table table_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) ; + +insert into table table_change_lower_to_higher_numeric_group_decimal_to_float VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new'); + +explain vectorization only detail +select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float; + +select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float; + +drop table table_change_lower_to_higher_numeric_group_decimal_to_float; diff --git ql/src/test/queries/clientpositive/schema_evol_orc_acidvec_table_update_llap_io.q ql/src/test/queries/clientpositive/schema_evol_orc_acidvec_table_update_llap_io.q new file mode 100644 index 0000000..8e3ba40 --- /dev/null +++ ql/src/test/queries/clientpositive/schema_evol_orc_acidvec_table_update_llap_io.q @@ -0,0 +1,113 @@ +set hive.explain.user=false; +set hive.fetch.task.conversion=none; +set hive.cli.print.header=true; +set hive.support.concurrency=true; +set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager; +set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat; +SET hive.exec.schema.evolution=false; +SET hive.vectorized.use.vectorized.input.format=true; +SET hive.vectorized.use.vector.serde.deserialize=false; +SET hive.vectorized.use.row.serde.deserialize=false; +SET hive.vectorized.execution.enabled=true; +set hive.exec.dynamic.partition.mode=nonstrict; +set hive.metastore.disallow.incompatible.col.type.changes=true; +set hive.default.fileformat=orc; +set hive.llap.io.enabled=true; +set hive.llap.io.encode.enabled=true; + +-- SORT_QUERY_RESULTS +-- +-- FILE VARIATION: ORC, ACID Vectorized, MapWork, Table +-- *IMPORTANT NOTE* We set hive.exec.schema.evolution=false above since schema evolution is always used for ACID. +-- Also, we don't do EXPLAINs on ACID files because the transaction id causes Q file statistics differences... +-- + +CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile; +load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data; + +CREATE TABLE schema_evolution_data_2(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile; +load data local inpath '../../data/files/schema_evolution/schema_evolution_data_2.txt' overwrite into table schema_evolution_data_2; + +-- +-- +-- SECTION VARIATION: ALTER TABLE ADD COLUMNS ... 
UPDATE New Columns +--- +CREATE TABLE table5(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table table5 SELECT insert_num, int1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade ADD COLUMNS ... +alter table table5 add columns(c int, d string); + +insert into table table5 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2; + +select a,b,c,d from table5; + +-- UPDATE New Columns +update table5 set c=99; + +select a,b,c,d from table5; + +alter table table5 compact 'major'; + +select a,b,c,d from table5; + +DROP TABLE table5; + +-- +-- +-- SECTION VARIATION: ALTER TABLE ADD COLUMNS ... DELETE where old column +--- +CREATE TABLE table6(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table table6 SELECT insert_num, int1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade ADD COLUMNS ... +alter table table6 add columns(c int, d string); + +insert into table table6 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num <= 110; + +insert into table table6 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110; + +select a,b,c,d from table6; + +-- DELETE where old column +delete from table6 where insert_num = 102 or insert_num = 104 or insert_num = 106; + +select a,b,c,d from table6; + +alter table table6 compact 'major'; + +select a,b,c,d from table6; + +DROP TABLE table6; + +-- +-- +-- SECTION VARIATION: ALTER TABLE ADD COLUMNS ... DELETE where new column +--- +CREATE TABLE table7(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true'); + +insert into table table7 SELECT insert_num, int1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade ADD COLUMNS ... 
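-- [Aside: a hypothetical, self-contained rerun of the UPDATE-on-new-columns pattern from
--  table5 above; acid_add_col_demo and its columns are made-up names.]
CREATE TABLE acid_add_col_demo(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true');
insert into table acid_add_col_demo values (1, 10, 'original');
alter table acid_add_col_demo add columns(c int);
select insert_num,a,b,c from acid_add_col_demo;  -- c is NULL for the pre-ALTER row
update acid_add_col_demo set c = 99;
select insert_num,a,b,c from acid_add_col_demo;  -- the UPDATE back-fills c
drop table acid_add_col_demo;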
+alter table table7 add columns(c int, d string); + +insert into table table7 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num <= 110; + +insert into table table7 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110; + +select a,b,c,d from table7; + +-- DELETE where new column +delete from table7 where insert_num = 107 or insert_num >= 110; + +select a,b,c,d from table7; + +alter table table7 compact 'major'; + +select a,b,c,d from table7; + +DROP TABLE table7; diff --git ql/src/test/queries/clientpositive/schema_evol_orc_nonvec_part_all_complex_llap_io.q ql/src/test/queries/clientpositive/schema_evol_orc_nonvec_part_all_complex_llap_io.q new file mode 100644 index 0000000..c401b25 --- /dev/null +++ ql/src/test/queries/clientpositive/schema_evol_orc_nonvec_part_all_complex_llap_io.q @@ -0,0 +1,164 @@ +set hive.explain.user=false; +set hive.fetch.task.conversion=none; +set hive.mapred.mode=nonstrict; +set hive.cli.print.header=true; +SET hive.exec.schema.evolution=true; +SET hive.vectorized.use.vectorized.input.format=true; +SET hive.vectorized.use.vector.serde.deserialize=false; +SET hive.vectorized.use.row.serde.deserialize=false; +SET hive.vectorized.execution.enabled=false; +set hive.exec.dynamic.partition.mode=nonstrict; +set hive.metastore.disallow.incompatible.col.type.changes=false; +set hive.default.fileformat=orc; +set hive.llap.io.enabled=true; +set hive.llap.io.encode.enabled=true; + +-- SORT_QUERY_RESULTS +-- +-- FILE VARIATION: ORC, Non-Vectorized, MapWork, Partitioned --> all complex conversions +-- +------------------------------------------------------------------------------------------ +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Various --> Various: STRUCT<...> --> STRUCT<...> +-- +CREATE TABLE part_change_various_various_struct1(insert_num int, s1 STRUCT<...>, b STRING) PARTITIONED BY(part INT); + +CREATE TABLE complex_struct1_a_txt(insert_num int, s1 STRUCT<...>, b STRING) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct1_a.txt' overwrite into table complex_struct1_a_txt; + +insert into table part_change_various_various_struct1 partition(part=1) select * from complex_struct1_a_txt; + +select insert_num,part,s1,b from part_change_various_various_struct1; + +-- Table-Non-Cascade CHANGE COLUMNS ...
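-- [Aside: the STRUCT field lists in this hunk are elided (rendered as STRUCT<...>), so the
--  fields c1/c2 below are hypothetical stand-ins for the complex-type pattern. Note that
--  VALUES cannot supply complex types, hence the named_struct() via SELECT.]
CREATE TABLE struct_demo(insert_num int, s1 STRUCT<c1:INT, c2:STRING>) STORED AS ORC;
insert into table struct_demo select 1, named_struct('c1', 42, 'c2', 'original');
select insert_num, s1.c1, s1.c2 from struct_demo;
drop table struct_demo;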
+alter table part_change_various_various_struct1 replace columns (insert_num int, s1 STRUCT<...>, b STRING); + +CREATE TABLE complex_struct1_b_txt(insert_num int, s1 STRUCT<...>, b STRING) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct1_b.txt' overwrite into table complex_struct1_b_txt; + +insert into table part_change_various_various_struct1 partition(part=2) select * from complex_struct1_b_txt; + +CREATE TABLE complex_struct1_c_txt(insert_num int, s1 STRUCT<...>, b STRING) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct1_c.txt' overwrite into table complex_struct1_c_txt; + +insert into table part_change_various_various_struct1 partition(part=1) select * from complex_struct1_c_txt; + +explain vectorization detail +select insert_num,part,s1,b from part_change_various_various_struct1; + +select insert_num,part,s1,b from part_change_various_various_struct1; + +drop table part_change_various_various_struct1; + + + +-- +-- SUBSECTION: ALTER TABLE ADD COLUMNS for Various --> Various: STRUCT +-- +CREATE TABLE part_add_various_various_struct2(insert_num int, b STRING) PARTITIONED BY(part INT); + +insert into table part_add_various_various_struct2 partition(part=1) + values(1, 'original'), + (2, 'original'); + +select insert_num,part,b from part_add_various_various_struct2; + +-- Table-Non-Cascade ADD COLUMN ... +alter table part_add_various_various_struct2 ADD columns (s2 STRUCT<...>); + +CREATE TABLE complex_struct2_a_txt(insert_num int, b STRING, s2 STRUCT<...>) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct2_a.txt' overwrite into table complex_struct2_a_txt; + +insert into table part_add_various_various_struct2 partition(part=1) select * from complex_struct2_a_txt; + +CREATE TABLE complex_struct2_b_txt(insert_num int, b STRING, s2 STRUCT<...>) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct2_b.txt' overwrite into table complex_struct2_b_txt; + +insert into table part_add_various_various_struct2 partition(part=2) select * from complex_struct2_b_txt; + +select insert_num,part,b,s2 from part_add_various_various_struct2; + +-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table part_add_various_various_struct2 REPLACE columns (insert_num int, b STRING, s2 STRUCT<...>); + +CREATE TABLE complex_struct2_c_txt(insert_num int, b STRING, s2 STRUCT<...>) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct2_c.txt' overwrite into table complex_struct2_c_txt; + +insert into table part_add_various_various_struct2 partition(part=2) select * from complex_struct2_c_txt; + +CREATE TABLE complex_struct2_d_txt(insert_num int, b STRING, s2 STRUCT<...>) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct2_d.txt' overwrite into table complex_struct2_d_txt; + +insert into table part_add_various_various_struct2 partition(part=1) select * from complex_struct2_d_txt; + +explain vectorization detail +select insert_num,part,b,s2 from part_add_various_various_struct2; + +select insert_num,part,b,s2 from part_add_various_various_struct2; + +drop table part_add_various_various_struct2; + + + + +-- +-- SUBSECTION: ALTER TABLE ADD COLUMNS for Various --> Various: ADD COLUMNS to STRUCT type as LAST column of 3 columns +-- +CREATE TABLE part_add_to_various_various_struct4(insert_num int, b STRING, s3 STRUCT<...>) PARTITIONED BY(part INT); + +CREATE TABLE complex_struct4_a_txt(insert_num int, b STRING, s3 STRUCT<...>) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct4_a.txt' overwrite into table complex_struct4_a_txt; + +insert into table part_add_to_various_various_struct4 partition(part=1) select * from complex_struct4_a_txt; + +select insert_num,part,b,s3 from part_add_to_various_various_struct4; + +-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table part_add_to_various_various_struct4 replace columns (insert_num int, b STRING, s3 STRUCT<...>); + +CREATE TABLE complex_struct4_b_txt(insert_num int, b STRING, s3 STRUCT<...>) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct4_b.txt' overwrite into table complex_struct4_b_txt; + +insert into table part_add_to_various_various_struct4 partition(part=2) select * from complex_struct4_b_txt; + +CREATE TABLE complex_struct4_c_txt(insert_num int, b STRING, s3 STRUCT<...>) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct4_c.txt' overwrite into table complex_struct4_c_txt; + +insert into table part_add_to_various_various_struct4 partition(part=1) select * from complex_struct4_c_txt; + +explain vectorization detail +select insert_num,part,b,s3 from part_add_to_various_various_struct4; + +select insert_num,part,b,s3 from part_add_to_various_various_struct4; + +drop table part_add_to_various_various_struct4; diff --git ql/src/test/queries/clientpositive/schema_evol_orc_nonvec_part_all_primitive_llap_io.q ql/src/test/queries/clientpositive/schema_evol_orc_nonvec_part_all_primitive_llap_io.q new file mode 100644 index 0000000..a91fcef --- /dev/null +++ ql/src/test/queries/clientpositive/schema_evol_orc_nonvec_part_all_primitive_llap_io.q @@ -0,0 +1,207 @@ +set hive.explain.user=false; +set hive.fetch.task.conversion=none; +set hive.mapred.mode=nonstrict; +set hive.cli.print.header=true; +SET hive.exec.schema.evolution=true; +SET hive.vectorized.use.vectorized.input.format=true; +SET hive.vectorized.use.vector.serde.deserialize=false; +SET hive.vectorized.use.row.serde.deserialize=false; +SET hive.vectorized.execution.enabled=false; +set hive.exec.dynamic.partition.mode=nonstrict; +set hive.metastore.disallow.incompatible.col.type.changes=false; +set hive.default.fileformat=orc; +set hive.llap.io.enabled=true; +set hive.llap.io.encode.enabled=true; + +-- SORT_QUERY_RESULTS +-- +-- FILE VARIATION: ORC, Non-Vectorized, MapWork, Partitioned --> all primitive conversions +-- +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS Various --> Various +-- +-- + +CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile; +load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data; + +CREATE TABLE schema_evolution_data_2(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string,
timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile; +load data local inpath '../../data/files/schema_evolution/schema_evolution_data_2.txt' overwrite into table schema_evolution_data_2; + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Various --> Various: +-- (BYTE, SHORT, INT, LONG, FLOAT, DOUBLE, DECIMAL, STRING, TIMESTAMP) --> BOOLEAN and +-- (BOOLEAN, SHORT, INT, LONG, FLOAT, DOUBLE, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> BYTE with a minimum value of -128 and a maximum value of 127 and +-- (BOOLEAN, TINYINT, INT, LONG, FLOAT, DOUBLE, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> SMALLINT with a minimum value of -32768 and a maximum value of 32767 and +-- (BOOLEAN, TINYINT, SMALLINT, LONG, FLOAT, DOUBLE, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> INT with a range of -2147483648 to 2147483647 and +-- (BOOLEAN, TINYINT, SMALLINT, INT, FLOAT, DOUBLE, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> BIGINT with a range of -9223372036854775808 to 9223372036854775807 +-- +CREATE TABLE part_change_various_various_boolean_to_bigint(insert_num int, + c1 TINYINT, c2 SMALLINT, c3 INT, c4 BIGINT, c5 FLOAT, c6 DOUBLE, c7 DECIMAL(38,18), c8 STRING, c9 TIMESTAMP, + c10 BOOLEAN, c11 SMALLINT, c12 INT, c13 BIGINT, c14 FLOAT, c15 DOUBLE, c16 DECIMAL(38,18), c17 STRING, c18 CHAR(25), c19 VARCHAR(25), c20 TIMESTAMP, + c21 BOOLEAN, c22 TINYINT, c23 INT, c24 BIGINT, c25 FLOAT, c26 DOUBLE, c27 DECIMAL(38,18), c28 STRING, c29 CHAR(25), c30 VARCHAR(25), c31 TIMESTAMP, + c32 BOOLEAN, c33 TINYINT, c34 SMALLINT, c35 BIGINT, c36 FLOAT, c37 DOUBLE, c38 DECIMAL(38,18), c39 STRING, c40 CHAR(25), c41 VARCHAR(25), c42 TIMESTAMP, + c43 BOOLEAN, c44 TINYINT, c45 SMALLINT, c46 INT, c47 FLOAT, c48 DOUBLE, c49 DECIMAL(38,18), c50 STRING, c51 CHAR(25), c52 VARCHAR(25), c53 TIMESTAMP, + b STRING) PARTITIONED BY(part INT); + +insert into table part_change_various_various_boolean_to_bigint partition(part=1) SELECT insert_num, + tinyint1, smallint1, int1, bigint1, float1, double1, decimal1, boolean_str, timestamp1, + boolean1, smallint1, int1, bigint1, float1, double1, decimal1, tinyint_str, tinyint_str, tinyint_str, timestamp1, + boolean1, tinyint1, int1, bigint1, float1, double1, decimal1, smallint_str, smallint_str, smallint_str, timestamp1, + boolean1, tinyint1, smallint1, bigint1, float1, double1, decimal1, int_str, int_str, int_str, timestamp1, + boolean1, tinyint1, smallint1, int1, float1, double1, decimal1, bigint_str, bigint_str, bigint_str, timestamp1, + 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint; + +-- Table-Non-Cascade CHANGE COLUMNS ...
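-- [Aside: the numeric --> BOOLEAN rule referenced above matches explicit cast semantics,
--  where any non-zero value reads as true; a quick hypothetical check:]
select cast(0 as boolean), cast(13 as boolean), cast(-1 as boolean);
-- false, true, true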
+alter table part_change_various_various_boolean_to_bigint replace columns (insert_num int, + c1 BOOLEAN, c2 BOOLEAN, c3 BOOLEAN, c4 BOOLEAN, c5 BOOLEAN, c6 BOOLEAN, c7 BOOLEAN, c8 BOOLEAN, c9 BOOLEAN, + c10 TINYINT, c11 TINYINT, c12 TINYINT, c13 TINYINT, c14 TINYINT, c15 TINYINT, c16 TINYINT, c17 TINYINT, c18 TINYINT, c19 TINYINT, c20 TINYINT, + c21 SMALLINT, c22 SMALLINT, c23 SMALLINT, c24 SMALLINT, c25 SMALLINT, c26 SMALLINT, c27 SMALLINT, c28 SMALLINT, c29 SMALLINT, c30 SMALLINT, c31 SMALLINT, + c32 INT, c33 INT, c34 INT, c35 INT, c36 INT, c37 INT, c38 INT, c39 INT, c40 INT, c41 INT, c42 INT, + c43 BIGINT, c44 BIGINT, c45 BIGINT, c46 BIGINT, c47 BIGINT, c48 BIGINT, c49 BIGINT, c50 BIGINT, c51 BIGINT, c52 BIGINT, c53 BIGINT, + b STRING); + +insert into table part_change_various_various_boolean_to_bigint partition(part=1) SELECT insert_num, + boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, int1, int1, int1, int1, int1, int1, int1, + bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, + 'new' FROM schema_evolution_data; + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint; + +drop table part_change_various_various_boolean_to_bigint; + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Various --> Various: +-- (BOOLEAN, TINYINT, SMALLINT, INT, LONG, FLOAT, DOUBLE, STRING, CHAR, VARCHAR, TIMESTAMP) --> DECIMAL +-- (BOOLEAN, TINYINT, SMALLINT, INT, LONG, DOUBLE, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> FLOAT and +-- (BOOLEAN, TINYINT, SMALLINT, INT, LONG, FLOAT, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> DOUBLE and +-- +CREATE TABLE part_change_various_various_decimal_to_double(insert_num int, + c1 BOOLEAN, c2 TINYINT, c3 SMALLINT, c4 INT, c5 BIGINT, c6 FLOAT, c7 DOUBLE, c8 STRING, c9 CHAR(25), c10 VARCHAR(25), c11 TIMESTAMP, + c12 BOOLEAN, c13 TINYINT, c14 SMALLINT, c15 INT, c16 BIGINT, c17 DECIMAL(38,18), c18 DOUBLE, c19 STRING, c20 CHAR(25), c21 VARCHAR(25), c22 TIMESTAMP, + c23 BOOLEAN, c24 TINYINT, c25 SMALLINT, c26 INT, c27 BIGINT, c28 DECIMAL(38,18), c29 FLOAT, c30 STRING, c31 CHAR(25), c32 VARCHAR(25), c33 TIMESTAMP, + b STRING) PARTITIONED BY(part INT); + +insert into table part_change_various_various_decimal_to_double partition(part=1) SELECT insert_num, + boolean1, tinyint1, smallint1, int1, bigint1, float1, double1, decimal_str, decimal_str, decimal_str, timestamp1, + boolean1, tinyint1, smallint1, int1, bigint1, decimal1, double1, float_str, float_str, float_str, timestamp1, + boolean1, tinyint1, smallint1, int1, bigint1, decimal1, float1, double_str, double_str, double_str, timestamp1, + 'original' FROM schema_evolution_data; + +select 
insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table part_change_various_various_decimal_to_double replace columns (insert_num int, + c1 DECIMAL(38,18), c2 DECIMAL(38,18), c3 DECIMAL(38,18), c4 DECIMAL(38,18), c5 DECIMAL(38,18), c6 DECIMAL(38,18), c7 DECIMAL(38,18), c8 DECIMAL(38,18), c9 DECIMAL(38,18), c10 DECIMAL(38,18), c11 DECIMAL(38,18), + c12 FLOAT, c13 FLOAT, c14 FLOAT, c15 FLOAT, c16 FLOAT, c17 FLOAT, c18 FLOAT, c19 FLOAT, c20 FLOAT, c21 FLOAT, c22 FLOAT, + c23 DOUBLE, c24 DOUBLE, c25 DOUBLE, c26 DOUBLE, c27 DOUBLE, c28 DOUBLE, c29 DOUBLE, c30 DOUBLE, c31 DOUBLE, c32 DOUBLE, c33 DOUBLE, + b STRING); + +insert into table part_change_various_various_decimal_to_double partition(part=1) SELECT insert_num, + decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, + float1, float1, float1, float1, float1, float1, float1, float1, float1, float1, float1, + double1, double1, double1, double1, double1, double1, double1, double1, double1, double1, double1, + 'new' FROM schema_evolution_data_2 WHERE insert_num=111; + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double; + +drop table part_change_various_various_decimal_to_double; + + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Various --> Various: (BOOLEAN, TINYINT, SMALLINT, INT, LONG, FLOAT, DOUBLE, DECIMAL(38,18), STRING, CHAR, VARCHAR, DATE) --> TIMESTAMP +-- +CREATE TABLE part_change_various_various_timestamp(insert_num int, c1 BOOLEAN, c2 TINYINT, c3 SMALLINT, c4 INT, c5 BIGINT, c6 FLOAT, c7 DOUBLE, c8 DECIMAL(38,18), c9 STRING, c10 CHAR(25), c11 VARCHAR(25), c12 DATE, b STRING) PARTITIONED BY(part INT); + +insert into table part_change_various_various_timestamp partition(part=1) SELECT insert_num, boolean1, tinyint1, smallint1, int1, bigint1, float1, double1, decimal1, timestamp_str, timestamp_str, timestamp_str, date1, 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
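-- [Aside: the string --> TIMESTAMP conversion tested here follows cast semantics; a value
--  that does not parse comes back as NULL rather than failing the query:]
select cast('2007-02-09 05:17:29.368756876' as timestamp), cast('not-a-timestamp' as timestamp);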
+alter table part_change_various_various_timestamp replace columns (insert_num int, c1 TIMESTAMP, c2 TIMESTAMP, c3 TIMESTAMP, c4 TIMESTAMP, c5 TIMESTAMP, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, c11 TIMESTAMP, c12 TIMESTAMP, b STRING); + +insert into table part_change_various_various_timestamp partition(part=1) SELECT insert_num, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'new' FROM schema_evolution_data_2 WHERE insert_num=111; + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp; + +drop table part_change_various_various_timestamp; + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Various --> Various: (STRING, CHAR, VARCHAR, TIMESTAMP) --> DATE +-- +CREATE TABLE part_change_various_various_date(insert_num int, c1 STRING, c2 CHAR(25), c3 VARCHAR(25), c4 TIMESTAMP, b STRING) PARTITIONED BY(part INT); + +insert into table part_change_various_various_date partition(part=1) SELECT insert_num, date_str, date_str, date_str, timestamp1, 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table part_change_various_various_date replace columns (insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, b STRING); + +insert into table part_change_various_various_date partition(part=1) SELECT insert_num, date1, date1, date1, date1, 'new' FROM schema_evolution_data_2 WHERE insert_num=111; + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date; + +select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date; + +drop table part_change_various_various_date; + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Same Type (CHAR, VARCHAR, DECIMAL) --> Different maxLength or precision/scale +-- +CREATE TABLE part_change_same_type_different_params(insert_num int, c1 CHAR(12), c2 CHAR(25), c3 VARCHAR(25), c4 VARCHAR(10), c5 DECIMAL(12,4), c6 DECIMAL(20,10), b STRING) PARTITIONED BY(part INT); + +CREATE TABLE same_type1_a_txt(insert_num int, c1 CHAR(12), c2 CHAR(25), c3 VARCHAR(25), c4 VARCHAR(10), c5 DECIMAL(12,4), c6 DECIMAL(20,10), b STRING) +row format delimited fields terminated by '|' +stored as textfile; +load data local inpath '../../data/files/schema_evolution/same_type1_a.txt' overwrite into table same_type1_a_txt; + +insert into table part_change_same_type_different_params partition(part=1) select * from same_type1_a_txt; + +select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params; + +-- Table-Non-Cascade CHANGE COLUMNS ...
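-- [Aside: for the same-type/different-params cases below, shrinking CHAR/VARCHAR maxLength
--  truncates and shrinking DECIMAL scale rounds; a hypothetical spot-check of both rules:]
select cast('interesting' as varchar(5)), cast(1234.5678 as decimal(10,2));
-- 'inter' and 1234.57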
+alter table part_change_same_type_different_params replace columns (insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING); + +CREATE TABLE same_type1_b_txt(insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING) +row format delimited fields terminated by '|' +stored as textfile; +load data local inpath '../../data/files/schema_evolution/same_type1_b.txt' overwrite into table same_type1_b_txt; + +insert into table part_change_same_type_different_params partition(part=1) select * from same_type1_b_txt; + +CREATE TABLE same_type1_c_txt(insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING) +row format delimited fields terminated by '|' +stored as textfile; +load data local inpath '../../data/files/schema_evolution/same_type1_c.txt' overwrite into table same_type1_c_txt; + +insert into table part_change_same_type_different_params partition(part=2) select * from same_type1_c_txt; + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params; + +select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params; + +drop table part_change_same_type_different_params; diff --git ql/src/test/queries/clientpositive/schema_evol_orc_nonvec_part_llap_io.q ql/src/test/queries/clientpositive/schema_evol_orc_nonvec_part_llap_io.q new file mode 100644 index 0000000..bb9bb01 --- /dev/null +++ ql/src/test/queries/clientpositive/schema_evol_orc_nonvec_part_llap_io.q @@ -0,0 +1,338 @@ +set hive.explain.user=false; +set hive.fetch.task.conversion=none; +set hive.mapred.mode=nonstrict; +set hive.cli.print.header=true; +SET hive.exec.schema.evolution=true; +SET hive.vectorized.use.vectorized.input.format=true; +SET hive.vectorized.use.vector.serde.deserialize=false; +SET hive.vectorized.use.row.serde.deserialize=false; +SET hive.vectorized.execution.enabled=false; +set hive.exec.dynamic.partition.mode=nonstrict; +set hive.metastore.disallow.incompatible.col.type.changes=true; +set hive.default.fileformat=orc; +set hive.llap.io.enabled=true; +set hive.llap.io.encode.enabled=true; + +-- SORT_QUERY_RESULTS +-- +-- FILE VARIATION: ORC, Non-Vectorized, MapWork, Partitioned +-- + +CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile; +load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data; + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE ADD COLUMNS +-- +-- +-- SUBSECTION: ALTER TABLE ADD COLUMNS: INT PERMUTE SELECT +-- +-- +CREATE TABLE part_add_int_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT); + +insert into table part_add_int_permute_select partition(part=1) VALUES (1, 1111, 'new'); + +-- Table-Non-Cascade ADD COLUMNS ... 
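-- [Aside: a hypothetical sketch of the NULL defaulting that the permutation SELECTs below
--  verify; part_demo and its columns are made-up names. Partition files written before
--  ADD COLUMNS simply return NULL for the new column:]
CREATE TABLE part_demo(a int) PARTITIONED BY (p int) STORED AS ORC;
insert into table part_demo partition(p=1) values (1);
alter table part_demo add columns(c int);
insert into table part_demo partition(p=2) values (2, 22);
select p, a, c from part_demo;  -- the p=1 row reads c as NULL
drop table part_demo;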
+alter table part_add_int_permute_select add columns(c int); + +insert into table part_add_int_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333); + +explain vectorization detail +select insert_num,part,a,b from part_add_int_permute_select; + +-- SELECT permutation columns to make sure NULL defaulting works right +select insert_num,part,a,b from part_add_int_permute_select; +select insert_num,part,a,b,c from part_add_int_permute_select; +select insert_num,part,c from part_add_int_permute_select; + +drop table part_add_int_permute_select; + + +-- SUBSECTION: ALTER TABLE ADD COLUMNS: INT, STRING, PERMUTE SELECT +-- +-- +CREATE TABLE part_add_int_string_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT); + +insert into table part_add_int_string_permute_select partition(part=1) VALUES (1, 1111, 'new'); + +-- Table-Non-Cascade ADD COLUMNS ... +alter table part_add_int_string_permute_select add columns(c int, d string); + +insert into table part_add_int_string_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333, '4444'); + +explain vectorization detail +select insert_num,part,a,b from part_add_int_string_permute_select; + +-- SELECT permutation columns to make sure NULL defaulting works right +select insert_num,part,a,b from part_add_int_string_permute_select; +select insert_num,part,a,b,c from part_add_int_string_permute_select; +select insert_num,part,a,b,c,d from part_add_int_string_permute_select; +select insert_num,part,a,c,d from part_add_int_string_permute_select; +select insert_num,part,a,d from part_add_int_string_permute_select; +select insert_num,part,c from part_add_int_string_permute_select; +select insert_num,part,d from part_add_int_string_permute_select; + +drop table part_add_int_string_permute_select; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> DOUBLE +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> DOUBLE: (STRING, CHAR, VARCHAR) +-- +CREATE TABLE part_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING) PARTITIONED BY(part INT); + +insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
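-- [Aside: the STRING_GROUP --> DOUBLE conversion below behaves like an explicit cast;
--  strings that do not parse as numbers convert to NULL:]
select cast('789.321' as double), cast('not-a-number' as double);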
+alter table part_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING); + +insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double1, double1, double1, 'new' FROM schema_evolution_data WHERE insert_num = 111; + +explain vectorization detail +select insert_num,part,c1,c2,c3,b from part_change_string_group_double; + +select insert_num,part,c1,c2,c3,b from part_change_string_group_double; + +drop table part_change_string_group_double; + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for DATE_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for DATE_GROUP -> STRING_GROUP: DATE,TIMESTAMP, (STRING, CHAR, CHAR trunc, VARCHAR, VARCHAR trunc) +-- +CREATE TABLE part_change_date_group_string_group_date_timestamp(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING) PARTITIONED BY(part INT); + +insert into table part_change_date_group_string_group_date_timestamp partition(part=1) SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table part_change_date_group_string_group_date_timestamp replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING); + +insert into table part_change_date_group_string_group_date_timestamp partition(part=1) VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new'); + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp; + +drop table part_change_date_group_string_group_date_timestamp; + + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: +-- (TINYINT, SMALLINT, INT, BIGINT), STRING and +-- (TINYINT, SMALLINT, INT, BIGINT), CHAR and CHAR trunc and +-- (TINYINT, SMALLINT, INT, BIGINT), VARCHAR and VARCHAR trunc +-- +-- +CREATE TABLE part_change_numeric_group_string_group_multi_ints_string_group(insert_num int, + c1 tinyint, c2 smallint, c3 int, c4 bigint, + c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint, + c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint, + b STRING) PARTITIONED BY(part INT); + +insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) SELECT insert_num, + tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
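-- [Aside: the "CHAR trunc"/"VARCHAR trunc" variants above probe truncation when the target
--  length is too small for the rendered integer; hypothetically:]
select cast('1234567' as char(5)), cast('1234567' as varchar(5));
-- both yield '12345'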
+alter table part_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, c4 STRING, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5), + c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5), + b STRING) ; + +insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new'); + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group; + +drop table part_change_numeric_group_string_group_multi_ints_string_group; + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: +-- (DECIMAL, FLOAT, DOUBLE), STRING and +-- (DECIMAL, FLOAT, DOUBLE), CHAR and CHAR trunc and +-- (DECIMAL, FLOAT, DOUBLE), VARCHAR and VARCHAR trunc +-- +-- +CREATE TABLE part_change_numeric_group_string_group_floating_string_group(insert_num int, + c1 decimal(38,18), c2 float, c3 double, + c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double, + c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double, + b STRING) PARTITIONED BY(part INT); + +insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) SELECT insert_num, + decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table part_change_numeric_group_string_group_floating_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, + c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7), + c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7), + b STRING); + +insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new'); + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group; + +drop table part_change_numeric_group_string_group_floating_string_group; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> STRING_GROUP: STRING, (CHAR, CHAR trunc, VARCHAR, VARCHAR trunc) and +-- CHAR, (VARCHAR, VARCHAR trunc, STRING) and VARCHAR, (CHAR, CHAR trunc, STRING) +-- +CREATE TABLE part_change_string_group_string_group_string(insert_num int, + c1 string, c2 string, c3 string, c4 string, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), + c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING) PARTITIONED BY(part INT); + +insert into table part_change_string_group_string_group_string partition(part=1) SELECT insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,c4,b from part_change_string_group_string_group_string; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
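-- [Aside: within STRING_GROUP the value is preserved and only maxLength matters; targets
--  shorter than the value truncate it:]
select cast('interesting' as char(9)), cast('interesting' as varchar(9));
-- both truncate to 'interesti'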
+alter table part_change_string_group_string_group_string replace columns (insert_num int, + c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9), + c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING, + c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) ; + +insert into table part_change_string_group_string_group_string partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'new'); + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string; + +drop table part_change_string_group_string_group_string; + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP: +-- TINYINT, (SMALLINT, INT, BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- SMALLINT, (INT, BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- INT, (BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- BIGINT, (DECIMAL, FLOAT, DOUBLE) +-- +CREATE TABLE part_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int, + c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, + c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint, + c12 int, c13 int, c14 int, c15 int, + c16 bigint, c17 bigint, c18 bigint, + b STRING) PARTITIONED BY(part INT); + +insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table part_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) ; + +insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new'); + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +drop table part_change_lower_to_higher_numeric_group_tinyint_to_bigint; + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP: +-- DECIMAL, (FLOAT, DOUBLE) and +-- FLOAT, (DOUBLE) +-- +CREATE TABLE part_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING) PARTITIONED BY(part INT); + +insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) SELECT insert_num, + decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table part_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) ; + +insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new'); + +explain vectorization detail +select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float; + +select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float; + +drop table part_change_lower_to_higher_numeric_group_decimal_to_float; \ No newline at end of file diff --git ql/src/test/queries/clientpositive/schema_evol_orc_nonvec_table_llap_io.q ql/src/test/queries/clientpositive/schema_evol_orc_nonvec_table_llap_io.q new file mode 100644 index 0000000..5312fbe --- /dev/null +++ ql/src/test/queries/clientpositive/schema_evol_orc_nonvec_table_llap_io.q @@ -0,0 +1,325 @@ +set hive.explain.user=false; +set hive.fetch.task.conversion=none; +set hive.cli.print.header=true; +SET hive.exec.schema.evolution=true; +SET hive.vectorized.use.vectorized.input.format=true; +SET hive.vectorized.use.vector.serde.deserialize=false; +SET hive.vectorized.use.row.serde.deserialize=false; +set hive.fetch.task.conversion=none; +SET hive.vectorized.execution.enabled=false; +set hive.exec.dynamic.partition.mode=nonstrict; +set hive.metastore.disallow.incompatible.col.type.changes=true; +set hive.default.fileformat=orc; +set hive.llap.io.enabled=true; +set hive.llap.io.encode.enabled=true; + +-- SORT_QUERY_RESULTS +-- +-- FILE VARIATION: ORC, Non-Vectorized, MapWork, Table +-- + +CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), 
float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile; +load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data; + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE ADD COLUMNS +-- +-- +-- SUBSECTION: ALTER TABLE ADD COLUMNS: INT PERMUTE SELECT +-- +-- +CREATE TABLE table_add_int_permute_select(insert_num int, a INT, b STRING); + +insert into table table_add_int_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade ADD COLUMNS ... +alter table table_add_int_permute_select add columns(c int); + +insert into table table_add_int_permute_select VALUES (111, 80000, 'new', 80000); + +explain vectorization detail +select insert_num,a,b from table_add_int_permute_select; + +-- SELECT permutation columns to make sure NULL defaulting works right +select insert_num,a,b from table_add_int_permute_select; +select insert_num,a,b,c from table_add_int_permute_select; +select insert_num,c from table_add_int_permute_select; + +drop table table_add_int_permute_select; + + +-- SUBSECTION: ALTER TABLE ADD COLUMNS: INT, STRING, PERMUTE SELECT +-- +-- +CREATE TABLE table_add_int_string_permute_select(insert_num int, a INT, b STRING); + +insert into table table_add_int_string_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade ADD COLUMNS ... +alter table table_add_int_string_permute_select add columns(c int, d string); + +insert into table table_add_int_string_permute_select VALUES (111, 80000, 'new', 80000, 'filler'); + +explain vectorization detail +select insert_num,a,b from table_add_int_string_permute_select; + +-- SELECT permutation columns to make sure NULL defaulting works right +select insert_num,a,b from table_add_int_string_permute_select; +select insert_num,a,b,c from table_add_int_string_permute_select; +select insert_num,a,b,c,d from table_add_int_string_permute_select; +select insert_num,a,c,d from table_add_int_string_permute_select; +select insert_num,a,d from table_add_int_string_permute_select; +select insert_num,c from table_add_int_string_permute_select; +select insert_num,d from table_add_int_string_permute_select; + +drop table table_add_int_string_permute_select; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> DOUBLE +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> DOUBLE: (STRING, CHAR, VARCHAR) +-- +CREATE TABLE table_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING); + +insert into table table_change_string_group_double SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table table_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING); + +insert into table table_change_string_group_double VALUES (111, 789.321, 789.321, 789.321, 'new'); + +explain vectorization detail +select insert_num,c1,c2,c3,b from table_change_string_group_double; + +select insert_num,c1,c2,c3,b from table_change_string_group_double; + +drop table table_change_string_group_double; + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for DATE_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for DATE_GROUP -> STRING_GROUP: DATE,TIMESTAMP, (STRING, CHAR, CHAR trunc, VARCHAR, VARCHAR trunc) +-- +CREATE TABLE table_change_date_group_string_group_date_group(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING); + +insert into table table_change_date_group_string_group_date_group SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table table_change_date_group_string_group_date_group replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING); + +insert into table table_change_date_group_string_group_date_group VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new'); + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_date_group_string_group_date_group; + +drop table table_change_date_group_string_group_date_group; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: +-- (TINYINT, SMALLINT, INT, BIGINT), STRING and +-- (TINYINT, SMALLINT, INT, BIGINT), CHAR and CHAR trunc and +-- (TINYINT, SMALLINT, INT, BIGINT), VARCHAR and VARCHAR trunc +-- +-- +CREATE TABLE table_change_numeric_group_string_group_multi_ints_string_group(insert_num int, + c1 tinyint, c2 smallint, c3 int, c4 bigint, + c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint, + c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint, + b STRING); + +insert into table table_change_numeric_group_string_group_multi_ints_string_group SELECT insert_num, + tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + 'original' FROM schema_evolution_data; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
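-- [Aside: DATE and TIMESTAMP values in the DATE_GROUP --> STRING_GROUP cases above render
--  in their canonical string forms, e.g.:]
select cast(date '2001-01-01' as string), cast(timestamp '2001-01-01 01:02:03' as string);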
+alter table table_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, c4 STRING, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5), + c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5), + b STRING) ; + +insert into table table_change_numeric_group_string_group_multi_ints_string_group VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new'); + +explain vectorization detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group; + +drop table table_change_numeric_group_string_group_multi_ints_string_group; + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: +-- (DECIMAL, FLOAT, DOUBLE), STRING and +-- (DECIMAL, FLOAT, DOUBLE), CHAR and CHAR trunc and +-- (DECIMAL, FLOAT, DOUBLE), VARCHAR and VARCHAR trunc +-- +-- +CREATE TABLE table_change_numeric_group_string_group_floating_string_group(insert_num int, + c1 decimal(38,18), c2 float, c3 double, + c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double, + c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double, + b STRING); + +insert into table table_change_numeric_group_string_group_floating_string_group SELECT insert_num, + decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + 'original' FROM schema_evolution_data; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table table_change_numeric_group_string_group_floating_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, + c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7), + c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7), + b STRING); + +insert into table table_change_numeric_group_string_group_floating_string_group VALUES (111, + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new'); + +explain vectorization detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group; + +drop table table_change_numeric_group_string_group_floating_string_group; + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> STRING_GROUP: STRING, (CHAR, CHAR trunc, VARCHAR, VARCHAR trunc) and +-- CHAR, (VARCHAR, VARCHAR trunc, STRING) and VARCHAR, (CHAR, CHAR trunc, STRING) +-- +CREATE TABLE table_change_string_group_string_group_string(insert_num int, + c1 string, c2 string, c3 string, c4 string, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), + c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING); + +insert into table table_change_string_group_string_group_string SELECT insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data; + +select insert_num,c1,c2,c3,c4,b from table_change_string_group_string_group_string; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table table_change_string_group_string_group_string replace columns (insert_num int, + c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9), + c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING, + c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) ; + +insert into table table_change_string_group_string_group_string VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'new'); + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_string_group_string_group_string; + +drop table table_change_string_group_string_group_string; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP: +-- TINYINT, (SMALLINT, INT, BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- SMALLINT, (INT, BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- INT, (BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- BIGINT, (DECIMAL, FLOAT, DOUBLE) +-- +CREATE TABLE table_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int, + c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, + c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint, + c12 int, c13 int, c14 int, c15 int, + c16 bigint, c17 bigint, c18 bigint, + b STRING); + +insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM schema_evolution_data; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table table_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) ; + +insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new'); + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +drop table table_change_lower_to_higher_numeric_group_tinyint_to_bigint; + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP: +-- DECIMAL, (FLOAT, DOUBLE) and +-- FLOAT, (DOUBLE) +-- +CREATE TABLE table_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING); + +insert into table table_change_lower_to_higher_numeric_group_decimal_to_float SELECT insert_num, + decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data; + +select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table table_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) ;
+
+insert into table table_change_lower_to_higher_numeric_group_decimal_to_float VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new');
+
+select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float;
+
+drop table table_change_lower_to_higher_numeric_group_decimal_to_float;
diff --git ql/src/test/queries/clientpositive/schema_evol_orc_vec_part_all_complex_llap_io.q ql/src/test/queries/clientpositive/schema_evol_orc_vec_part_all_complex_llap_io.q
new file mode 100644
index 0000000..af6129e
--- /dev/null
+++ ql/src/test/queries/clientpositive/schema_evol_orc_vec_part_all_complex_llap_io.q
@@ -0,0 +1,164 @@
+set hive.explain.user=false;
+set hive.fetch.task.conversion=none;
+set hive.mapred.mode=nonstrict;
+set hive.cli.print.header=true;
+SET hive.exec.schema.evolution=true;
+SET hive.vectorized.use.vectorized.input.format=true;
+SET hive.vectorized.use.vector.serde.deserialize=false;
+SET hive.vectorized.use.row.serde.deserialize=false;
+SET hive.vectorized.execution.enabled=true;
+set hive.exec.dynamic.partition.mode=nonstrict;
+set hive.metastore.disallow.incompatible.col.type.changes=false;
+set hive.default.fileformat=orc;
+set hive.llap.io.enabled=true;
+set hive.llap.io.encode.enabled=true;
+
+-- SORT_QUERY_RESULTS
+--
+-- FILE VARIATION: ORC, Vectorized, MapWork, Partitioned --> all complex conversions
+--
+------------------------------------------------------------------------------------------
+--
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Various --> Various: STRUCT --> STRUCT
+--
+CREATE TABLE part_change_various_various_struct1(insert_num int, s1 STRUCT, b STRING) PARTITIONED BY(part INT);
+
+CREATE TABLE complex_struct1_a_txt(insert_num int, s1 STRUCT, b STRING)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile;
+load data local inpath '../../data/files/schema_evolution/complex_struct1_a.txt' overwrite into table complex_struct1_a_txt;
+
+insert into table part_change_various_various_struct1 partition(part=1) select * from complex_struct1_a_txt;
+
+select insert_num,part,s1,b from part_change_various_various_struct1;
+
+-- Table-Non-Cascade CHANGE COLUMNS ...
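+-- The REPLACE below changes only table metadata; partition 1 keeps its originally written struct layout and each struct field is converted on read, just like a top-level column.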
+alter table part_change_various_various_struct1 replace columns (insert_num int, s1 STRUCT, b STRING); + +CREATE TABLE complex_struct1_b_txt(insert_num int, s1 STRUCT, b STRING) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct1_b.txt' overwrite into table complex_struct1_b_txt; + +insert into table part_change_various_various_struct1 partition(part=2) select * from complex_struct1_b_txt; + +CREATE TABLE complex_struct1_c_txt(insert_num int, s1 STRUCT, b STRING) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct1_c.txt' overwrite into table complex_struct1_c_txt; + +insert into table part_change_various_various_struct1 partition(part=1) select * from complex_struct1_c_txt; + + explain vectorization detail +select insert_num,part,s1,b from part_change_various_various_struct1; + +select insert_num,part,s1,b from part_change_various_various_struct1; + +drop table part_change_various_various_struct1; + + + +-- +-- SUBSECTION: ALTER TABLE ADD COLUMNS for Various --> Various: STRUCT +-- +CREATE TABLE part_add_various_various_struct2(insert_num int, b STRING) PARTITIONED BY(part INT); + +insert into table part_add_various_various_struct2 partition(part=1) + values(1, 'original'), + (2, 'original'); + +select insert_num,part,b from part_add_various_various_struct2; + +-- Table-Non-Cascade ADD COLUMN ... +alter table part_add_various_various_struct2 ADD columns (s2 STRUCT); + +CREATE TABLE complex_struct2_a_txt(insert_num int, b STRING, s2 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct2_a.txt' overwrite into table complex_struct2_a_txt; + +insert into table part_add_various_various_struct2 partition(part=1) select * from complex_struct2_a_txt; + +CREATE TABLE complex_struct2_b_txt(insert_num int, b STRING, s2 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct2_b.txt' overwrite into table complex_struct2_b_txt; + +insert into table part_add_various_various_struct2 partition(part=2) select * from complex_struct2_b_txt; + +select insert_num,part,b,s2 from part_add_various_various_struct2; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
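+-- Partitions written before this REPLACE are reinterpreted under the new s2 field types when scanned.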
+alter table part_add_various_various_struct2 REPLACE columns (insert_num int, b STRING, s2 STRUCT); + +CREATE TABLE complex_struct2_c_txt(insert_num int, b STRING, s2 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct2_c.txt' overwrite into table complex_struct2_c_txt; + +insert into table part_add_various_various_struct2 partition(part=2) select * from complex_struct2_c_txt; + +CREATE TABLE complex_struct2_d_txt(insert_num int, b STRING, s2 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct2_d.txt' overwrite into table complex_struct2_d_txt; + +insert into table part_add_various_various_struct2 partition(part=1) select * from complex_struct2_d_txt; + +explain vectorization detail +select insert_num,part,b,s2 from part_add_various_various_struct2; + +select insert_num,part,b,s2 from part_add_various_various_struct2; + +drop table part_add_various_various_struct2; + + + + +-- +-- SUBSECTION: ALTER TABLE ADD COLUMNS for Various --> Various: ADD COLUMNS to STRUCT type as LAST column of 3 columns +-- +CREATE TABLE part_add_to_various_various_struct4(insert_num int, b STRING, s3 STRUCT) PARTITIONED BY(part INT); + +CREATE TABLE complex_struct4_a_txt(insert_num int, b STRING, s3 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct4_a.txt' overwrite into table complex_struct4_a_txt; + +insert into table part_add_to_various_various_struct4 partition(part=1) select * from complex_struct4_a_txt; + +select insert_num,part,b,s3 from part_add_to_various_various_struct4; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
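+-- Fields appended at the end of a STRUCT behave like appended columns: rows written before the change read back NULL for the new fields.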
+alter table part_add_to_various_various_struct4 replace columns (insert_num int, b STRING, s3 STRUCT); + +CREATE TABLE complex_struct4_b_txt(insert_num int, b STRING, s3 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct4_b.txt' overwrite into table complex_struct4_b_txt; + +insert into table part_add_to_various_various_struct4 partition(part=2) select * from complex_struct4_b_txt; + +CREATE TABLE complex_struct4_c_txt(insert_num int, b STRING, s3 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct4_c.txt' overwrite into table complex_struct4_c_txt; + +insert into table part_add_to_various_various_struct4 partition(part=1) select * from complex_struct4_c_txt; + +explain vectorization detail +select insert_num,part,b,s3 from part_add_to_various_various_struct4; + +select insert_num,part,b,s3 from part_add_to_various_various_struct4; + +drop table part_add_to_various_various_struct4; diff --git ql/src/test/queries/clientpositive/schema_evol_orc_vec_part_all_primitive_llap_io.q ql/src/test/queries/clientpositive/schema_evol_orc_vec_part_all_primitive_llap_io.q new file mode 100644 index 0000000..d7eb636 --- /dev/null +++ ql/src/test/queries/clientpositive/schema_evol_orc_vec_part_all_primitive_llap_io.q @@ -0,0 +1,207 @@ +set hive.explain.user=false; +set hive.fetch.task.conversion=none; +set hive.mapred.mode=nonstrict; +set hive.cli.print.header=true; +SET hive.exec.schema.evolution=true; +SET hive.vectorized.use.vectorized.input.format=true; +SET hive.vectorized.use.vector.serde.deserialize=false; +SET hive.vectorized.use.row.serde.deserialize=false; +SET hive.vectorized.execution.enabled=true; +set hive.exec.dynamic.partition.mode=nonstrict; +set hive.metastore.disallow.incompatible.col.type.changes=false; +set hive.default.fileformat=orc; +set hive.llap.io.enabled=true; +set hive.llap.io.encode.enabled=true; + +-- SORT_QUERY_RESULTS +-- +-- FILE VARIATION: ORC, Vectorized, MapWork, Partitioned --> all primitive conversions +-- +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS Various --> Various +-- +-- + +CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile; +load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data; + +CREATE TABLE schema_evolution_data_2(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str 
string, filler string)
+row format delimited fields terminated by '|' stored as textfile;
+load data local inpath '../../data/files/schema_evolution/schema_evolution_data_2.txt' overwrite into table schema_evolution_data_2;
+
+--
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Various --> Various:
+-- (BYTE, SHORT, INT, LONG, FLOAT, DOUBLE, DECIMAL, STRING, TIMESTAMP) --> BOOLEAN and
+-- (BOOLEAN, SHORT, INT, LONG, FLOAT, DOUBLE, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> BYTE (minimum value of -128 and a maximum value of 127) and
+-- (BOOLEAN, TINYINT, INT, LONG, FLOAT, DOUBLE, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> SMALLINT (minimum value of -32768 and a maximum value of 32767) and
+-- (BOOLEAN, TINYINT, SMALLINT, LONG, FLOAT, DOUBLE, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> INT (-2147483648 to 2147483647) and
+-- (BOOLEAN, TINYINT, SMALLINT, INT, FLOAT, DOUBLE, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> BIGINT (-9223372036854775808 to 9223372036854775807)
+--
+CREATE TABLE part_change_various_various_boolean_to_bigint(insert_num int,
+ c1 TINYINT, c2 SMALLINT, c3 INT, c4 BIGINT, c5 FLOAT, c6 DOUBLE, c7 DECIMAL(38,18), c8 STRING, c9 TIMESTAMP,
+ c10 BOOLEAN, c11 SMALLINT, c12 INT, c13 BIGINT, c14 FLOAT, c15 DOUBLE, c16 DECIMAL(38,18), c17 STRING, c18 CHAR(25), c19 VARCHAR(25), c20 TIMESTAMP,
+ c21 BOOLEAN, c22 TINYINT, c23 INT, c24 BIGINT, c25 FLOAT, c26 DOUBLE, c27 DECIMAL(38,18), c28 STRING, c29 CHAR(25), c30 VARCHAR(25), c31 TIMESTAMP,
+ c32 BOOLEAN, c33 TINYINT, c34 SMALLINT, c35 BIGINT, c36 FLOAT, c37 DOUBLE, c38 DECIMAL(38,18), c39 STRING, c40 CHAR(25), c41 VARCHAR(25), c42 TIMESTAMP,
+ c43 BOOLEAN, c44 TINYINT, c45 SMALLINT, c46 INT, c47 FLOAT, c48 DOUBLE, c49 DECIMAL(38,18), c50 STRING, c51 CHAR(25), c52 VARCHAR(25), c53 TIMESTAMP,
+ b STRING) PARTITIONED BY(part INT);
+
+insert into table part_change_various_various_boolean_to_bigint partition(part=1) SELECT insert_num,
+ tinyint1, smallint1, int1, bigint1, float1, double1, decimal1, boolean_str, timestamp1,
+ boolean1, smallint1, int1, bigint1, float1, double1, decimal1, tinyint_str, tinyint_str, tinyint_str, timestamp1,
+ boolean1, tinyint1, int1, bigint1, float1, double1, decimal1, smallint_str, smallint_str, smallint_str, timestamp1,
+ boolean1, tinyint1, smallint1, bigint1, float1, double1, decimal1, int_str, int_str, int_str, timestamp1,
+ boolean1, tinyint1, smallint1, int1, float1, double1, decimal1, bigint_str, bigint_str, bigint_str, timestamp1,
+ 'original' FROM schema_evolution_data;
+
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint;
+
+-- Table-Non-Cascade CHANGE COLUMNS ...
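+-- These conversions narrow as well as widen; a value that cannot be represented in the target type (or a string that does not parse) is expected to read back as NULL.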
+alter table part_change_various_various_boolean_to_bigint replace columns (insert_num int, + c1 BOOLEAN, c2 BOOLEAN, c3 BOOLEAN, c4 BOOLEAN, c5 BOOLEAN, c6 BOOLEAN, c7 BOOLEAN, c8 BOOLEAN, c9 BOOLEAN, + c10 TINYINT, c11 TINYINT, c12 TINYINT, c13 TINYINT, c14 TINYINT, c15 TINYINT, c16 TINYINT, c17 TINYINT, c18 TINYINT, c19 TINYINT, c20 TINYINT, + c21 SMALLINT, c22 SMALLINT, c23 SMALLINT, c24 SMALLINT, c25 SMALLINT, c26 SMALLINT, c27 SMALLINT, c28 SMALLINT, c29 SMALLINT, c30 SMALLINT, c31 SMALLINT, + c32 INT, c33 INT, c34 INT, c35 INT, c36 INT, c37 INT, c38 INT, c39 INT, c40 INT, c41 INT, c42 INT, + c43 BIGINT, c44 BIGINT, c45 BIGINT, c46 BIGINT, c47 BIGINT, c48 BIGINT, c49 BIGINT, c50 BIGINT, c51 BIGINT, c52 BIGINT, c53 BIGINT, + b STRING); + +insert into table part_change_various_various_boolean_to_bigint partition(part=1) SELECT insert_num, + boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, int1, int1, int1, int1, int1, int1, int1, + bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, + 'new' FROM schema_evolution_data; + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint; + +drop table part_change_various_various_boolean_to_bigint; + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Various --> Various: +-- (BOOLEAN, TINYINT, SMALLINT, INT, LONG, FLOAT, DOUBLE, STRING, CHAR, VARCHAR, TIMESTAMP) --> DECIMAL +-- (BOOLEAN, TINYINT, SMALLINT, INT, LONG, DOUBLE, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> FLOAT and +-- (BOOLEAN, TINYINT, SMALLINT, INT, LONG, FLOAT, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> DOUBLE and +-- +CREATE TABLE part_change_various_various_decimal_to_double(insert_num int, + c1 BOOLEAN, c2 TINYINT, c3 SMALLINT, c4 INT, c5 BIGINT, c6 FLOAT, c7 DOUBLE, c8 STRING, c9 CHAR(25), c10 VARCHAR(25), c11 TIMESTAMP, + c12 BOOLEAN, c13 TINYINT, c14 SMALLINT, c15 INT, c16 BIGINT, c17 DECIMAL(38,18), c18 DOUBLE, c19 STRING, c20 CHAR(25), c21 VARCHAR(25), c22 TIMESTAMP, + c23 BOOLEAN, c24 TINYINT, c25 SMALLINT, c26 INT, c27 BIGINT, c28 DECIMAL(38,18), c29 FLOAT, c30 STRING, c31 CHAR(25), c32 VARCHAR(25), c33 TIMESTAMP, + b STRING) PARTITIONED BY(part INT); + +insert into table part_change_various_various_decimal_to_double partition(part=1) SELECT insert_num, + boolean1, tinyint1, smallint1, int1, bigint1, float1, double1, decimal_str, decimal_str, decimal_str, timestamp1, + boolean1, tinyint1, smallint1, int1, bigint1, decimal1, double1, float_str, float_str, float_str, timestamp1, + boolean1, tinyint1, smallint1, int1, bigint1, decimal1, float1, double_str, double_str, double_str, timestamp1, + 'original' FROM schema_evolution_data; + +select 
insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table part_change_various_various_decimal_to_double replace columns (insert_num int, + c1 DECIMAL(38,18), c2 DECIMAL(38,18), c3 DECIMAL(38,18), c4 DECIMAL(38,18), c5 DECIMAL(38,18), c6 DECIMAL(38,18), c7 DECIMAL(38,18), c8 DECIMAL(38,18), c9 DECIMAL(38,18), c10 DECIMAL(38,18), c11 DECIMAL(38,18), + c12 FLOAT, c13 FLOAT, c14 FLOAT, c15 FLOAT, c16 FLOAT, c17 FLOAT, c18 FLOAT, c19 FLOAT, c20 FLOAT, c21 FLOAT, c22 FLOAT, + c23 DOUBLE, c24 DOUBLE, c25 DOUBLE, c26 DOUBLE, c27 DOUBLE, c28 DOUBLE, c29 DOUBLE, c30 DOUBLE, c31 DOUBLE, c32 DOUBLE, c33 DOUBLE, + b STRING); + +insert into table part_change_various_various_decimal_to_double partition(part=1) SELECT insert_num, + decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, + float1, float1, float1, float1, float1, float1, float1, float1, float1, float1, float1, + double1, double1, double1, double1, double1, double1, double1, double1, double1, double1, double1, + 'new' FROM schema_evolution_data_2 WHERE insert_num=111; + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double; + +drop table part_change_various_various_decimal_to_double; + + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Various --> Various: (BOOLEAN, TINYINT, SMALLINT, INT, LONG, FLOAT, DOUBLE, DECIMAL(38,18), STRING, CHAR, VARCHAR, DATE) --> TIMESTAMP +-- +CREATE TABLE part_change_various_various_timestamp(insert_num int, c1 BOOLEAN, c2 TINYINT, c3 SMALLINT, c4 INT, c5 BIGINT, c6 FLOAT, c7 DOUBLE, c8 DECIMAL(38,18), c9 STRING, c10 CHAR(25), c11 VARCHAR(25), c12 DATE, b STRING) PARTITIONED BY(part INT); + +insert into table part_change_various_various_timestamp partition(part=1) SELECT insert_num, boolean1, tinyint1, smallint1, int1, bigint1, float1, double1, decimal1, timestamp_str, timestamp_str, timestamp_str, date1, 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
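+-- Every source column must convert to TIMESTAMP; strings that do not parse as timestamps are expected to read back as NULL.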
+alter table part_change_various_various_timestamp replace columns (insert_num int, c1 TIMESTAMP, c2 TIMESTAMP, c3 TIMESTAMP, c4 TIMESTAMP, c5 TIMESTAMP, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, c11 TIMESTAMP, c12 TIMESTAMP, b STRING);
+
+insert into table part_change_various_various_timestamp partition(part=1) SELECT insert_num, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'new' FROM schema_evolution_data_2 WHERE insert_num=111;
+
+explain vectorization detail
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp;
+
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp;
+
+drop table part_change_various_various_timestamp;
+
+--
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Various --> Various: (STRING, CHAR, VARCHAR, TIMESTAMP) --> DATE
+--
+CREATE TABLE part_change_various_various_date(insert_num int, c1 STRING, c2 CHAR(25), c3 VARCHAR(25), c4 TIMESTAMP, b STRING) PARTITIONED BY(part INT);
+
+insert into table part_change_various_various_date partition(part=1) SELECT insert_num, date_str, date_str, date_str, timestamp1, 'original' FROM schema_evolution_data;
+
+select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date;
+
+-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table part_change_various_various_date replace columns (insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, b STRING);
+
+insert into table part_change_various_various_date partition(part=1) SELECT insert_num, date1, date1, date1, date1, 'new' FROM schema_evolution_data_2 WHERE insert_num=111;
+
+explain vectorization detail
+select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date;
+
+select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date;
+
+drop table part_change_various_various_date;
+
+
+--
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Same Type (CHAR, VARCHAR, DECIMAL) --> Different maxLength or precision/scale
+--
+CREATE TABLE part_change_same_type_different_params(insert_num int, c1 CHAR(12), c2 CHAR(25), c3 VARCHAR(25), c4 VARCHAR(10), c5 DECIMAL(12,4), c6 DECIMAL(20,10), b STRING) PARTITIONED BY(part INT);
+
+CREATE TABLE same_type1_a_txt(insert_num int, c1 CHAR(12), c2 CHAR(25), c3 VARCHAR(25), c4 VARCHAR(10), c5 DECIMAL(12,4), c6 DECIMAL(20,10), b STRING)
+row format delimited fields terminated by '|'
+stored as textfile;
+load data local inpath '../../data/files/schema_evolution/same_type1_a.txt' overwrite into table same_type1_a_txt;
+
+insert into table part_change_same_type_different_params partition(part=1) select * from same_type1_a_txt;
+
+select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params;
+
+-- Table-Non-Cascade CHANGE COLUMNS ...
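+-- Same type, new parameters: CHAR/VARCHAR values are re-truncated to the new maxLength, and DECIMAL values are rescaled (reading back NULL if they no longer fit the new precision/scale).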
+alter table part_change_same_type_different_params replace columns (insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING); + +CREATE TABLE same_type1_b_txt(insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING) +row format delimited fields terminated by '|' +stored as textfile; +load data local inpath '../../data/files/schema_evolution/same_type1_b.txt' overwrite into table same_type1_b_txt; + +insert into table part_change_same_type_different_params partition(part=1) select * from same_type1_b_txt; + +CREATE TABLE same_type1_c_txt(insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING) +row format delimited fields terminated by '|' +stored as textfile; +load data local inpath '../../data/files/schema_evolution/same_type1_c.txt' overwrite into table same_type1_c_txt; + +insert into table part_change_same_type_different_params partition(part=2) select * from same_type1_c_txt; + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params; + +select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params; + +drop table part_change_same_type_different_params; diff --git ql/src/test/queries/clientpositive/schema_evol_orc_vec_part_llap_io.q ql/src/test/queries/clientpositive/schema_evol_orc_vec_part_llap_io.q new file mode 100644 index 0000000..52535dd --- /dev/null +++ ql/src/test/queries/clientpositive/schema_evol_orc_vec_part_llap_io.q @@ -0,0 +1,339 @@ +set hive.explain.user=false; +set hive.fetch.task.conversion=none; +set hive.mapred.mode=nonstrict; +set hive.cli.print.header=true; +SET hive.exec.schema.evolution=true; +SET hive.vectorized.use.vectorized.input.format=true; +SET hive.vectorized.use.vector.serde.deserialize=false; +SET hive.vectorized.use.row.serde.deserialize=false; +SET hive.vectorized.execution.enabled=true; +set hive.fetch.task.conversion=none; +set hive.exec.dynamic.partition.mode=nonstrict; +set hive.metastore.disallow.incompatible.col.type.changes=true; +set hive.default.fileformat=orc; +set hive.llap.io.enabled=true; +set hive.llap.io.encode.enabled=true; + +-- SORT_QUERY_RESULTS +-- +-- FILE VARIATION: ORC, Vectorized, MapWork, Partitioned +-- + +CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile; +load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data; + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE ADD COLUMNS +-- +-- +-- SUBSECTION: ALTER TABLE ADD COLUMNS: INT PERMUTE SELECT +-- +-- +CREATE TABLE part_add_int_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT); + +insert into table part_add_int_permute_select partition(part=1) VALUES (1, 1111, 'new'); + +-- Table-Non-Cascade ADD COLUMNS ... 
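+-- ADD COLUMNS only appends to the metadata; the row inserted above carries no value for c, so c must read back as NULL for it.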
+alter table part_add_int_permute_select add columns(c int); + +insert into table part_add_int_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333); + +explain vectorization detail +select insert_num,part,a,b from part_add_int_permute_select; + +-- SELECT permutation columns to make sure NULL defaulting works right +select insert_num,part,a,b from part_add_int_permute_select; +select insert_num,part,a,b,c from part_add_int_permute_select; +select insert_num,part,c from part_add_int_permute_select; + +drop table part_add_int_permute_select; + + +-- SUBSECTION: ALTER TABLE ADD COLUMNS: INT, STRING, PERMUTE SELECT +-- +-- +CREATE TABLE part_add_int_string_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT); + +insert into table part_add_int_string_permute_select partition(part=1) VALUES (1, 1111, 'new'); + +-- Table-Non-Cascade ADD COLUMNS ... +alter table part_add_int_string_permute_select add columns(c int, d string); + +insert into table part_add_int_string_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333, '4444'); + +explain vectorization detail +select insert_num,part,a,b from part_add_int_string_permute_select; + +-- SELECT permutation columns to make sure NULL defaulting works right +select insert_num,part,a,b from part_add_int_string_permute_select; +select insert_num,part,a,b,c from part_add_int_string_permute_select; +select insert_num,part,a,b,c,d from part_add_int_string_permute_select; +select insert_num,part,a,c,d from part_add_int_string_permute_select; +select insert_num,part,a,d from part_add_int_string_permute_select; +select insert_num,part,c from part_add_int_string_permute_select; +select insert_num,part,d from part_add_int_string_permute_select; + +drop table part_add_int_string_permute_select; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> DOUBLE +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> DOUBLE: (STRING, CHAR, VARCHAR) +-- +CREATE TABLE part_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING) PARTITIONED BY(part INT); + +insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
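+-- The partition already holds the textual form of doubles (double_str); after the ALTER those strings must parse back as DOUBLE values.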
+alter table part_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING); + +insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double1, double1, double1, 'new' FROM schema_evolution_data WHERE insert_num = 111; + +explain vectorization detail +select insert_num,part,c1,c2,c3,b from part_change_string_group_double; + +select insert_num,part,c1,c2,c3,b from part_change_string_group_double; + +drop table part_change_string_group_double; + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for DATE_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for DATE_GROUP -> STRING_GROUP: DATE,TIMESTAMP, (STRING, CHAR, CHAR trunc, VARCHAR, VARCHAR trunc) +-- +CREATE TABLE part_change_date_group_string_group_date_timestamp(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING) PARTITIONED BY(part INT); + +insert into table part_change_date_group_string_group_date_timestamp partition(part=1) SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table part_change_date_group_string_group_date_timestamp replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING); + +insert into table part_change_date_group_string_group_date_timestamp partition(part=1) VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new'); + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp; + +drop table part_change_date_group_string_group_date_timestamp; + + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: +-- (TINYINT, SMALLINT, INT, BIGINT), STRING and +-- (TINYINT, SMALLINT, INT, BIGINT), CHAR and CHAR trunc and +-- (TINYINT, SMALLINT, INT, BIGINT), VARCHAR and VARCHAR trunc +-- +-- +CREATE TABLE part_change_numeric_group_string_group_multi_ints_string_group(insert_num int, + c1 tinyint, c2 smallint, c3 int, c4 bigint, + c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint, + c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint, + b STRING) PARTITIONED BY(part INT); + +insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) SELECT insert_num, + tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
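+-- The integer values are rendered as text; the CHAR(5)/VARCHAR(5) columns additionally exercise truncation of the rendered digits.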
+alter table part_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, c4 STRING, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5), + c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5), + b STRING) ; + +insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new'); + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group; + +drop table part_change_numeric_group_string_group_multi_ints_string_group; + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: +-- (DECIMAL, FLOAT, DOUBLE), STRING and +-- (DECIMAL, FLOAT, DOUBLE), CHAR and CHAR trunc and +-- (DECIMAL, FLOAT, DOUBLE), VARCHAR and VARCHAR trunc +-- +-- +CREATE TABLE part_change_numeric_group_string_group_floating_string_group(insert_num int, + c1 decimal(38,18), c2 float, c3 double, + c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double, + c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double, + b STRING) PARTITIONED BY(part INT); + +insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) SELECT insert_num, + decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
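+-- Same pattern for the DECIMAL/FLOAT/DOUBLE sources; here CHAR(7)/VARCHAR(7) truncate the rendered floating point text.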
+alter table part_change_numeric_group_string_group_floating_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, + c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7), + c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7), + b STRING); + +insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new'); + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group; + +drop table part_change_numeric_group_string_group_floating_string_group; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> STRING_GROUP: STRING, (CHAR, CHAR trunc, VARCHAR, VARCHAR trunc) and +-- CHAR, (VARCHAR, VARCHAR trunc, STRING) and VARCHAR, (CHAR, CHAR trunc, STRING) +-- +CREATE TABLE part_change_string_group_string_group_string(insert_num int, + c1 string, c2 string, c3 string, c4 string, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), + c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING) PARTITIONED BY(part INT); + +insert into table part_change_string_group_string_group_string partition(part=1) SELECT insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,c4,b from part_change_string_group_string_group_string; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table part_change_string_group_string_group_string replace columns (insert_num int, + c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9), + c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING, + c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) ; + +insert into table part_change_string_group_string_group_string partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'new'); + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string; + +drop table part_change_string_group_string_group_string; + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP: +-- TINYINT, (SMALLINT, INT, BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- SMALLINT, (INT, BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- INT, (BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- BIGINT, (DECIMAL, FLOAT, DOUBLE) +-- +CREATE TABLE part_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int, + c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, + c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint, + c12 int, c13 int, c14 int, c15 int, + c16 bigint, c17 bigint, c18 bigint, + b STRING) PARTITIONED BY(part INT); + +insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
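+-- All of these are safe widenings within the numeric group, so no NULLs are expected for the 'original' rows.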
+alter table part_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) ; + +insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new'); + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +drop table part_change_lower_to_higher_numeric_group_tinyint_to_bigint; + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP: +-- DECIMAL, (FLOAT, DOUBLE) and +-- FLOAT, (DOUBLE) +-- +CREATE TABLE part_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING) PARTITIONED BY(part INT); + +insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) SELECT insert_num, + decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table part_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) ; + +insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new'); + +explain vectorization detail +select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float; + +select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float; + +drop table part_change_lower_to_higher_numeric_group_decimal_to_float; \ No newline at end of file diff --git ql/src/test/queries/clientpositive/schema_evol_orc_vec_table_llap_io.q ql/src/test/queries/clientpositive/schema_evol_orc_vec_table_llap_io.q new file mode 100644 index 0000000..ac206f0 --- /dev/null +++ ql/src/test/queries/clientpositive/schema_evol_orc_vec_table_llap_io.q @@ -0,0 +1,324 @@ +set hive.explain.user=false; +set hive.fetch.task.conversion=none; +set hive.cli.print.header=true; +SET hive.exec.schema.evolution=true; +SET hive.vectorized.use.vectorized.input.format=true; +SET hive.vectorized.use.vector.serde.deserialize=false; +SET hive.vectorized.use.row.serde.deserialize=false; +SET hive.vectorized.execution.enabled=true; +set hive.exec.dynamic.partition.mode=nonstrict; +set hive.metastore.disallow.incompatible.col.type.changes=true; +set hive.default.fileformat=orc; +set hive.llap.io.enabled=true; +set hive.llap.io.encode.enabled=true; + +-- SORT_QUERY_RESULTS +-- +-- FILE VARIATION: ORC, Vectorized, MapWork, Table +-- + +CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 
string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile; +load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data; + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE ADD COLUMNS +-- +-- +-- SUBSECTION: ALTER TABLE ADD COLUMNS: INT PERMUTE SELECT +-- +-- +CREATE TABLE table_add_int_permute_select(insert_num int, a INT, b STRING); + +insert into table table_add_int_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade ADD COLUMNS ... +alter table table_add_int_permute_select add columns(c int); + +insert into table table_add_int_permute_select VALUES (111, 80000, 'new', 80000); + +explain vectorization detail +select insert_num,a,b from table_add_int_permute_select; + +-- SELECT permutation columns to make sure NULL defaulting works right +select insert_num,a,b from table_add_int_permute_select; +select insert_num,a,b,c from table_add_int_permute_select; +select insert_num,c from table_add_int_permute_select; + +drop table table_add_int_permute_select; + + +-- SUBSECTION: ALTER TABLE ADD COLUMNS: INT, STRING, PERMUTE SELECT +-- +-- +CREATE TABLE table_add_int_string_permute_select(insert_num int, a INT, b STRING); + +insert into table table_add_int_string_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade ADD COLUMNS ... +alter table table_add_int_string_permute_select add columns(c int, d string); + +insert into table table_add_int_string_permute_select VALUES (111, 80000, 'new', 80000, 'filler'); + +explain vectorization detail +select insert_num,a,b from table_add_int_string_permute_select; + +-- SELECT permutation columns to make sure NULL defaulting works right +select insert_num,a,b from table_add_int_string_permute_select; +select insert_num,a,b,c from table_add_int_string_permute_select; +select insert_num,a,b,c,d from table_add_int_string_permute_select; +select insert_num,a,c,d from table_add_int_string_permute_select; +select insert_num,a,d from table_add_int_string_permute_select; +select insert_num,c from table_add_int_string_permute_select; +select insert_num,d from table_add_int_string_permute_select; + +drop table table_add_int_string_permute_select; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> DOUBLE +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> DOUBLE: (STRING, CHAR, VARCHAR) +-- +CREATE TABLE table_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING); + +insert into table table_change_string_group_double SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
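+-- Non-partitioned variant of the same conversion: the string-to-double change applies directly to the table-level schema.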
+alter table table_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING); + +insert into table table_change_string_group_double VALUES (111, 789.321, 789.321, 789.321, 'new'); + +explain vectorization detail +select insert_num,c1,c2,c3,b from table_change_string_group_double; + +select insert_num,c1,c2,c3,b from table_change_string_group_double; + +drop table table_change_string_group_double; + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for DATE_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for DATE_GROUP -> STRING_GROUP: DATE,TIMESTAMP, (STRING, CHAR, CHAR trunc, VARCHAR, VARCHAR trunc) +-- +CREATE TABLE table_change_date_group_string_group_date_group(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING); + +insert into table table_change_date_group_string_group_date_group SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table table_change_date_group_string_group_date_group replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING); + +insert into table table_change_date_group_string_group_date_group VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new'); + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_date_group_string_group_date_group; + +drop table table_change_date_group_string_group_date_group; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: +-- (TINYINT, SMALLINT, INT, BIGINT), STRING and +-- (TINYINT, SMALLINT, INT, BIGINT), CHAR and CHAR trunc and +-- (TINYINT, SMALLINT, INT, BIGINT), VARCHAR and VARCHAR trunc +-- +-- +CREATE TABLE table_change_numeric_group_string_group_multi_ints_string_group(insert_num int, + c1 tinyint, c2 smallint, c3 int, c4 bigint, + c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint, + c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint, + b STRING); + +insert into table table_change_numeric_group_string_group_multi_ints_string_group SELECT insert_num, + tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + 'original' FROM schema_evolution_data; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table table_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, c4 STRING, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5), + c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5), + b STRING) ; + +insert into table table_change_numeric_group_string_group_multi_ints_string_group VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new'); + +explain vectorization detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group; + +drop table table_change_numeric_group_string_group_multi_ints_string_group; + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: +-- (DECIMAL, FLOAT, DOUBLE), STRING and +-- (DECIMAL, FLOAT, DOUBLE), CHAR and CHAR trunc and +-- (DECIMAL, FLOAT, DOUBLE), VARCHAR and VARCHAR trunc +-- +-- +CREATE TABLE table_change_numeric_group_string_group_floating_string_group(insert_num int, + c1 decimal(38,18), c2 float, c3 double, + c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double, + c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double, + b STRING); + +insert into table table_change_numeric_group_string_group_floating_string_group SELECT insert_num, + decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + 'original' FROM schema_evolution_data; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table table_change_numeric_group_string_group_floating_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, + c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7), + c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7), + b STRING); + +insert into table table_change_numeric_group_string_group_floating_string_group VALUES (111, + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new'); + +explain vectorization detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group; + +drop table table_change_numeric_group_string_group_floating_string_group; + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> STRING_GROUP: STRING, (CHAR, CHAR trunc, VARCHAR, VARCHAR trunc) and +-- CHAR, (VARCHAR, VARCHAR trunc, STRING) and VARCHAR, (CHAR, CHAR trunc, STRING) +-- +CREATE TABLE table_change_string_group_string_group_string(insert_num int, + c1 string, c2 string, c3 string, c4 string, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), + c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING); + +insert into table table_change_string_group_string_group_string SELECT insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data; + +select insert_num,c1,c2,c3,c4,b from table_change_string_group_string_group_string; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table table_change_string_group_string_group_string replace columns (insert_num int, + c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9), + c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING, + c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) ; + +insert into table table_change_string_group_string_group_string VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'new'); + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_string_group_string_group_string; + +drop table table_change_string_group_string_group_string; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP: +-- TINYINT, (SMALLINT, INT, BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- SMALLINT, (INT, BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- INT, (BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- BIGINT, (DECIMAL, FLOAT, DOUBLE) +-- +CREATE TABLE table_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int, + c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, + c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint, + c12 int, c13 int, c14 int, c15 int, + c16 bigint, c17 bigint, c18 bigint, + b STRING); + +insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM schema_evolution_data; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table table_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) ; + +insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new'); + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +drop table table_change_lower_to_higher_numeric_group_tinyint_to_bigint; + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP: +-- DECIMAL, (FLOAT, DOUBLE) and +-- FLOAT, (DOUBLE) +-- +CREATE TABLE table_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING); + +insert into table table_change_lower_to_higher_numeric_group_decimal_to_float SELECT insert_num, + decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data; + +select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table table_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) ;
+
+insert into table table_change_lower_to_higher_numeric_group_decimal_to_float VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new');
+
+select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float;
+
+drop table table_change_lower_to_higher_numeric_group_decimal_to_float;
diff --git ql/src/test/queries/clientpositive/schema_evol_text_nonvec_part_all_complex_llap_io.q ql/src/test/queries/clientpositive/schema_evol_text_nonvec_part_all_complex_llap_io.q
new file mode 100644
index 0000000..c679ea9
--- /dev/null
+++ ql/src/test/queries/clientpositive/schema_evol_text_nonvec_part_all_complex_llap_io.q
@@ -0,0 +1,164 @@
+set hive.explain.user=false;
+set hive.fetch.task.conversion=none;
+set hive.mapred.mode=nonstrict;
+set hive.cli.print.header=true;
+SET hive.exec.schema.evolution=true;
+SET hive.vectorized.use.vectorized.input.format=true;
+SET hive.vectorized.use.vector.serde.deserialize=false;
+SET hive.vectorized.use.row.serde.deserialize=false;
+SET hive.vectorized.execution.enabled=false;
+set hive.exec.dynamic.partition.mode=nonstrict;
+set hive.metastore.disallow.incompatible.col.type.changes=false;
+set hive.default.fileformat=textfile;
+set hive.llap.io.enabled=true;
+set hive.llap.io.encode.enabled=true;
+
+-- SORT_QUERY_RESULTS
+--
+-- FILE VARIATION: TEXTFILE, Non-Vectorized, MapWork, Partitioned --> all complex conversions
+--
+------------------------------------------------------------------------------------------
+--
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Various --> Various: STRUCT --> STRUCT
+--
+CREATE TABLE part_change_various_various_struct1(insert_num int, s1 STRUCT, b STRING) PARTITIONED BY(part INT);
+
+CREATE TABLE complex_struct1_a_txt(insert_num int, s1 STRUCT, b STRING)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile;
+load data local inpath '../../data/files/schema_evolution/complex_struct1_a.txt' overwrite into table complex_struct1_a_txt;
+
+insert into table part_change_various_various_struct1 partition(part=1) select * from complex_struct1_a_txt;
+
+select insert_num,part,s1,b from part_change_various_various_struct1;
+
+-- Table-Non-Cascade CHANGE COLUMNS ...
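+-- Same complex conversions as the ORC variant, but over TEXTFILE with vectorized execution off; LLAP IO with encode enabled still caches the text data, which is the path under test.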
+alter table part_change_various_various_struct1 replace columns (insert_num int, s1 STRUCT, b STRING); + +CREATE TABLE complex_struct1_b_txt(insert_num int, s1 STRUCT, b STRING) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct1_b.txt' overwrite into table complex_struct1_b_txt; + +insert into table part_change_various_various_struct1 partition(part=2) select * from complex_struct1_b_txt; + +CREATE TABLE complex_struct1_c_txt(insert_num int, s1 STRUCT, b STRING) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct1_c.txt' overwrite into table complex_struct1_c_txt; + +insert into table part_change_various_various_struct1 partition(part=1) select * from complex_struct1_c_txt; + +explain vectorization detail +select insert_num,part,s1,b from part_change_various_various_struct1; + +select insert_num,part,s1,b from part_change_various_various_struct1; + +drop table part_change_various_various_struct1; + + + +-- +-- SUBSECTION: ALTER TABLE ADD COLUMNS for Various --> Various: STRUCT +-- +CREATE TABLE part_add_various_various_struct2(insert_num int, b STRING) PARTITIONED BY(part INT); + +insert into table part_add_various_various_struct2 partition(part=1) + values(1, 'original'), + (2, 'original'); + +select insert_num,part,b from part_add_various_various_struct2; + +-- Table-Non-Cascade ADD COLUMN ... +alter table part_add_various_various_struct2 ADD columns (s2 STRUCT); + +CREATE TABLE complex_struct2_a_txt(insert_num int, b STRING, s2 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct2_a.txt' overwrite into table complex_struct2_a_txt; + +insert into table part_add_various_various_struct2 partition(part=1) select * from complex_struct2_a_txt; + +CREATE TABLE complex_struct2_b_txt(insert_num int, b STRING, s2 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct2_b.txt' overwrite into table complex_struct2_b_txt; + +insert into table part_add_various_various_struct2 partition(part=2) select * from complex_struct2_b_txt; + +select insert_num,part,b,s2 from part_add_various_various_struct2; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table part_add_various_various_struct2 REPLACE columns (insert_num int, b STRING, s2 STRUCT); + +CREATE TABLE complex_struct2_c_txt(insert_num int, b STRING, s2 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct2_c.txt' overwrite into table complex_struct2_c_txt; + +insert into table part_add_various_various_struct2 partition(part=2) select * from complex_struct2_c_txt; + +CREATE TABLE complex_struct2_d_txt(insert_num int, b STRING, s2 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct2_d.txt' overwrite into table complex_struct2_d_txt; + +insert into table part_add_various_various_struct2 partition(part=1) select * from complex_struct2_d_txt; + +explain vectorization detail +select insert_num,part,b,s2 from part_add_various_various_struct2; + +select insert_num,part,b,s2 from part_add_various_various_struct2; + +drop table part_add_various_various_struct2; + + + + +-- +-- SUBSECTION: ALTER TABLE ADD COLUMNS for Various --> Various: ADD COLUMNS to STRUCT type as LAST column of 3 columns +-- +CREATE TABLE part_add_to_various_various_struct4(insert_num int, b STRING, s3 STRUCT) PARTITIONED BY(part INT); + +CREATE TABLE complex_struct4_a_txt(insert_num int, b STRING, s3 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct4_a.txt' overwrite into table complex_struct4_a_txt; + +insert into table part_add_to_various_various_struct4 partition(part=1) select * from complex_struct4_a_txt; + +select insert_num,part,b,s3 from part_add_to_various_various_struct4; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table part_add_to_various_various_struct4 replace columns (insert_num int, b STRING, s3 STRUCT); + +CREATE TABLE complex_struct4_b_txt(insert_num int, b STRING, s3 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct4_b.txt' overwrite into table complex_struct4_b_txt; + +insert into table part_add_to_various_various_struct4 partition(part=2) select * from complex_struct4_b_txt; + +CREATE TABLE complex_struct4_c_txt(insert_num int, b STRING, s3 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct4_c.txt' overwrite into table complex_struct4_c_txt; + +insert into table part_add_to_various_various_struct4 partition(part=1) select * from complex_struct4_c_txt; + +explain vectorization detail +select insert_num,part,b,s3 from part_add_to_various_various_struct4; + +select insert_num,part,b,s3 from part_add_to_various_various_struct4; + +drop table part_add_to_various_various_struct4; diff --git ql/src/test/queries/clientpositive/schema_evol_text_nonvec_part_all_primitive_llap_io.q ql/src/test/queries/clientpositive/schema_evol_text_nonvec_part_all_primitive_llap_io.q new file mode 100644 index 0000000..7e97c24 --- /dev/null +++ ql/src/test/queries/clientpositive/schema_evol_text_nonvec_part_all_primitive_llap_io.q @@ -0,0 +1,207 @@ +set hive.explain.user=false; +set hive.fetch.task.conversion=none; +set hive.mapred.mode=nonstrict; +set hive.cli.print.header=true; +SET hive.exec.schema.evolution=true; +SET hive.vectorized.use.vectorized.input.format=true; +SET hive.vectorized.use.vector.serde.deserialize=false; +SET hive.vectorized.use.row.serde.deserialize=false; +SET hive.vectorized.execution.enabled=false; +set hive.exec.dynamic.partition.mode=nonstrict; +set hive.metastore.disallow.incompatible.col.type.changes=false; +set hive.default.fileformat=textfile; +set hive.llap.io.enabled=true; +set hive.llap.io.encode.enabled=true; + +-- SORT_QUERY_RESULTS +-- +-- FILE VARIATION: TEXTFILE, Non-Vectorized, MapWork, Partitioned --> all primitive conversions +-- +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS Various --> Various +-- +-- + +CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile; +load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data; + +CREATE TABLE schema_evolution_data_2(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str 
string, timestamp_str string, filler string)
+row format delimited fields terminated by '|' stored as textfile;
+load data local inpath '../../data/files/schema_evolution/schema_evolution_data_2.txt' overwrite into table schema_evolution_data_2;
+
+--
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Various --> Various:
+--            (BYTE, SHORT, INT, LONG, FLOAT, DOUBLE, DECIMAL, STRING, TIMESTAMP) --> BOOLEAN and
+--            (BOOLEAN, SHORT, INT, LONG, FLOAT, DOUBLE, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> BYTE (minimum value of -128 and a maximum value of 127) and
+--            (BOOLEAN, TINYINT, INT, LONG, FLOAT, DOUBLE, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> SMALLINT (minimum value of -32768 and a maximum value of 32767) and
+--            (BOOLEAN, TINYINT, SMALLINT, LONG, FLOAT, DOUBLE, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> INT (minimum value of -2147483648 and a maximum value of 2147483647) and
+--            (BOOLEAN, TINYINT, SMALLINT, INT, FLOAT, DOUBLE, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> BIGINT (minimum value of -9223372036854775808 and a maximum value of 9223372036854775807)
+--
+CREATE TABLE part_change_various_various_boolean_to_bigint(insert_num int,
+             c1 TINYINT, c2 SMALLINT, c3 INT, c4 BIGINT, c5 FLOAT, c6 DOUBLE, c7 DECIMAL(38,18), c8 STRING, c9 TIMESTAMP,
+             c10 BOOLEAN, c11 SMALLINT, c12 INT, c13 BIGINT, c14 FLOAT, c15 DOUBLE, c16 DECIMAL(38,18), c17 STRING, c18 CHAR(25), c19 VARCHAR(25), c20 TIMESTAMP,
+             c21 BOOLEAN, c22 TINYINT, c23 INT, c24 BIGINT, c25 FLOAT, c26 DOUBLE, c27 DECIMAL(38,18), c28 STRING, c29 CHAR(25), c30 VARCHAR(25), c31 TIMESTAMP,
+             c32 BOOLEAN, c33 TINYINT, c34 SMALLINT, c35 BIGINT, c36 FLOAT, c37 DOUBLE, c38 DECIMAL(38,18), c39 STRING, c40 CHAR(25), c41 VARCHAR(25), c42 TIMESTAMP,
+             c43 BOOLEAN, c44 TINYINT, c45 SMALLINT, c46 INT, c47 FLOAT, c48 DOUBLE, c49 DECIMAL(38,18), c50 STRING, c51 CHAR(25), c52 VARCHAR(25), c53 TIMESTAMP,
+             b STRING) PARTITIONED BY(part INT);
+
+insert into table part_change_various_various_boolean_to_bigint partition(part=1) SELECT insert_num,
+             tinyint1, smallint1, int1, bigint1, float1, double1, decimal1, boolean_str, timestamp1,
+             boolean1, smallint1, int1, bigint1, float1, double1, decimal1, tinyint_str, tinyint_str, tinyint_str, timestamp1,
+             boolean1, tinyint1, int1, bigint1, float1, double1, decimal1, smallint_str, smallint_str, smallint_str, timestamp1,
+             boolean1, tinyint1, smallint1, bigint1, float1, double1, decimal1, int_str, int_str, int_str, timestamp1,
+             boolean1, tinyint1, smallint1, int1, float1, double1, decimal1, bigint_str, bigint_str, bigint_str, timestamp1,
+             'original' FROM schema_evolution_data;
+
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint;
+
+-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table part_change_various_various_boolean_to_bigint replace columns (insert_num int, + c1 BOOLEAN, c2 BOOLEAN, c3 BOOLEAN, c4 BOOLEAN, c5 BOOLEAN, c6 BOOLEAN, c7 BOOLEAN, c8 BOOLEAN, c9 BOOLEAN, + c10 TINYINT, c11 TINYINT, c12 TINYINT, c13 TINYINT, c14 TINYINT, c15 TINYINT, c16 TINYINT, c17 TINYINT, c18 TINYINT, c19 TINYINT, c20 TINYINT, + c21 SMALLINT, c22 SMALLINT, c23 SMALLINT, c24 SMALLINT, c25 SMALLINT, c26 SMALLINT, c27 SMALLINT, c28 SMALLINT, c29 SMALLINT, c30 SMALLINT, c31 SMALLINT, + c32 INT, c33 INT, c34 INT, c35 INT, c36 INT, c37 INT, c38 INT, c39 INT, c40 INT, c41 INT, c42 INT, + c43 BIGINT, c44 BIGINT, c45 BIGINT, c46 BIGINT, c47 BIGINT, c48 BIGINT, c49 BIGINT, c50 BIGINT, c51 BIGINT, c52 BIGINT, c53 BIGINT, + b STRING); + +insert into table part_change_various_various_boolean_to_bigint partition(part=1) SELECT insert_num, + boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, int1, int1, int1, int1, int1, int1, int1, + bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, + 'new' FROM schema_evolution_data; + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint; + +drop table part_change_various_various_boolean_to_bigint; + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Various --> Various: +-- (BOOLEAN, TINYINT, SMALLINT, INT, LONG, FLOAT, DOUBLE, STRING, CHAR, VARCHAR, TIMESTAMP) --> DECIMAL +-- (BOOLEAN, TINYINT, SMALLINT, INT, LONG, DOUBLE, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> FLOAT and +-- (BOOLEAN, TINYINT, SMALLINT, INT, LONG, FLOAT, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> DOUBLE and +-- +CREATE TABLE part_change_various_various_decimal_to_double(insert_num int, + c1 BOOLEAN, c2 TINYINT, c3 SMALLINT, c4 INT, c5 BIGINT, c6 FLOAT, c7 DOUBLE, c8 STRING, c9 CHAR(25), c10 VARCHAR(25), c11 TIMESTAMP, + c12 BOOLEAN, c13 TINYINT, c14 SMALLINT, c15 INT, c16 BIGINT, c17 DECIMAL(38,18), c18 DOUBLE, c19 STRING, c20 CHAR(25), c21 VARCHAR(25), c22 TIMESTAMP, + c23 BOOLEAN, c24 TINYINT, c25 SMALLINT, c26 INT, c27 BIGINT, c28 DECIMAL(38,18), c29 FLOAT, c30 STRING, c31 CHAR(25), c32 VARCHAR(25), c33 TIMESTAMP, + b STRING) PARTITIONED BY(part INT); + +insert into table part_change_various_various_decimal_to_double partition(part=1) SELECT insert_num, + boolean1, tinyint1, smallint1, int1, bigint1, float1, double1, decimal_str, decimal_str, decimal_str, timestamp1, + boolean1, tinyint1, smallint1, int1, bigint1, decimal1, double1, float_str, float_str, float_str, timestamp1, + boolean1, tinyint1, smallint1, int1, bigint1, decimal1, float1, double_str, double_str, double_str, timestamp1, + 'original' FROM schema_evolution_data; + +select 
insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table part_change_various_various_decimal_to_double replace columns (insert_num int, + c1 DECIMAL(38,18), c2 DECIMAL(38,18), c3 DECIMAL(38,18), c4 DECIMAL(38,18), c5 DECIMAL(38,18), c6 DECIMAL(38,18), c7 DECIMAL(38,18), c8 DECIMAL(38,18), c9 DECIMAL(38,18), c10 DECIMAL(38,18), c11 DECIMAL(38,18), + c12 FLOAT, c13 FLOAT, c14 FLOAT, c15 FLOAT, c16 FLOAT, c17 FLOAT, c18 FLOAT, c19 FLOAT, c20 FLOAT, c21 FLOAT, c22 FLOAT, + c23 DOUBLE, c24 DOUBLE, c25 DOUBLE, c26 DOUBLE, c27 DOUBLE, c28 DOUBLE, c29 DOUBLE, c30 DOUBLE, c31 DOUBLE, c32 DOUBLE, c33 DOUBLE, + b STRING); + +insert into table part_change_various_various_decimal_to_double partition(part=1) SELECT insert_num, + decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, + float1, float1, float1, float1, float1, float1, float1, float1, float1, float1, float1, + double1, double1, double1, double1, double1, double1, double1, double1, double1, double1, double1, + 'new' FROM schema_evolution_data_2 WHERE insert_num=111; + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double; + +drop table part_change_various_various_decimal_to_double; + + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Various --> Various: (BOOLEAN, TINYINT, SMALLINT, INT, LONG, FLOAT, DOUBLE, DECIMAL(38,18), STRING, CHAR, VARCHAR, DATE) --> TIMESTAMP +-- +CREATE TABLE part_change_various_various_timestamp(insert_num int, c1 BOOLEAN, c2 TINYINT, c3 SMALLINT, c4 INT, c5 BIGINT, c6 FLOAT, c7 DOUBLE, c8 DECIMAL(38,18), c9 STRING, c10 CHAR(25), c11 VARCHAR(25), c12 DATE, b STRING) PARTITIONED BY(part INT); + +insert into table part_change_various_various_timestamp partition(part=1) SELECT insert_num, boolean1, tinyint1, smallint1, int1, bigint1, float1, double1, decimal1, timestamp_str, timestamp_str, timestamp_str, date1, 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table part_change_various_various_timestamp replace columns (insert_num int, c1 TIMESTAMP, c2 TIMESTAMP, c3 TIMESTAMP, c4 TIMESTAMP, c5 TIMESTAMP, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, c11 TIMESTAMP, c12 TIMESTAMP, b STRING);
+
+insert into table part_change_various_various_timestamp partition(part=1) SELECT insert_num, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'new' FROM schema_evolution_data_2 WHERE insert_num=111;
+
+explain vectorization detail
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp;
+
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp;
+
+drop table part_change_various_various_timestamp;
+
+--
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Various --> Various: (STRING, CHAR, VARCHAR, TIMESTAMP) --> DATE
+--
+CREATE TABLE part_change_various_various_date(insert_num int, c1 STRING, c2 CHAR(25), c3 VARCHAR(25), c4 TIMESTAMP, b STRING) PARTITIONED BY(part INT);
+
+insert into table part_change_various_various_date partition(part=1) SELECT insert_num, date_str, date_str, date_str, timestamp1, 'original' FROM schema_evolution_data;
+
+select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date;
+
+-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table part_change_various_various_date replace columns (insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, b STRING);
+
+insert into table part_change_various_various_date partition(part=1) SELECT insert_num, date1, date1, date1, date1, 'new' FROM schema_evolution_data_2 WHERE insert_num=111;
+
+explain vectorization detail
+select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date;
+
+select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date;
+
+drop table part_change_various_various_date;
+
+
+--
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Same Type (CHAR, VARCHAR, DECIMAL) --> Different maxLength or precision/scale
+--
+CREATE TABLE part_change_same_type_different_params(insert_num int, c1 CHAR(12), c2 CHAR(25), c3 VARCHAR(25), c4 VARCHAR(10), c5 DECIMAL(12,4), c6 DECIMAL(20,10), b STRING) PARTITIONED BY(part INT);
+
+CREATE TABLE same_type1_a_txt(insert_num int, c1 CHAR(12), c2 CHAR(25), c3 VARCHAR(25), c4 VARCHAR(10), c5 DECIMAL(12,4), c6 DECIMAL(20,10), b STRING)
+row format delimited fields terminated by '|'
+stored as textfile;
+load data local inpath '../../data/files/schema_evolution/same_type1_a.txt' overwrite into table same_type1_a_txt;
+
+insert into table part_change_same_type_different_params partition(part=1) select * from same_type1_a_txt;
+
+select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params;
+
+-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table part_change_same_type_different_params replace columns (insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING); + +CREATE TABLE same_type1_b_txt(insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING) +row format delimited fields terminated by '|' +stored as textfile; +load data local inpath '../../data/files/schema_evolution/same_type1_b.txt' overwrite into table same_type1_b_txt; + +insert into table part_change_same_type_different_params partition(part=1) select * from same_type1_b_txt; + +CREATE TABLE same_type1_c_txt(insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING) +row format delimited fields terminated by '|' +stored as textfile; +load data local inpath '../../data/files/schema_evolution/same_type1_c.txt' overwrite into table same_type1_c_txt; + +insert into table part_change_same_type_different_params partition(part=2) select * from same_type1_c_txt; + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params; + +select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params; + +drop table part_change_same_type_different_params; diff --git ql/src/test/queries/clientpositive/schema_evol_text_nonvec_part_llap_io.q ql/src/test/queries/clientpositive/schema_evol_text_nonvec_part_llap_io.q new file mode 100644 index 0000000..ca2cfda --- /dev/null +++ ql/src/test/queries/clientpositive/schema_evol_text_nonvec_part_llap_io.q @@ -0,0 +1,338 @@ +set hive.explain.user=false; +set hive.fetch.task.conversion=none; +set hive.mapred.mode=nonstrict; +set hive.cli.print.header=true; +SET hive.exec.schema.evolution=true; +SET hive.vectorized.use.vectorized.input.format=true; +SET hive.vectorized.use.vector.serde.deserialize=false; +SET hive.vectorized.use.row.serde.deserialize=false; +SET hive.vectorized.execution.enabled=false; +set hive.exec.dynamic.partition.mode=nonstrict; +set hive.metastore.disallow.incompatible.col.type.changes=true; +set hive.default.fileformat=textfile; +set hive.llap.io.enabled=true; +set hive.llap.io.encode.enabled=true; + +-- SORT_QUERY_RESULTS +-- +-- FILE VARIATION: TEXTFILE, Non-Vectorized, MapWork, Partitioned +-- + +CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile; +load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data; + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE ADD COLUMNS +-- +-- +-- SUBSECTION: ALTER TABLE ADD COLUMNS: INT PERMUTE SELECT +-- +-- +CREATE TABLE part_add_int_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT); + +insert into table part_add_int_permute_select partition(part=1) VALUES (1, 1111, 'new'); + +-- Table-Non-Cascade ADD COLUMNS ... 
+alter table part_add_int_permute_select add columns(c int); + +insert into table part_add_int_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333); + +explain vectorization detail +select insert_num,part,a,b from part_add_int_permute_select; + +-- SELECT permutation columns to make sure NULL defaulting works right +select insert_num,part,a,b from part_add_int_permute_select; +select insert_num,part,a,b,c from part_add_int_permute_select; +select insert_num,part,c from part_add_int_permute_select; + +drop table part_add_int_permute_select; + + +-- SUBSECTION: ALTER TABLE ADD COLUMNS: INT, STRING, PERMUTE SELECT +-- +-- +CREATE TABLE part_add_int_string_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT); + +insert into table part_add_int_string_permute_select partition(part=1) VALUES (1, 1111, 'new'); + +-- Table-Non-Cascade ADD COLUMNS ... +alter table part_add_int_string_permute_select add columns(c int, d string); + +insert into table part_add_int_string_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333, '4444'); + +explain vectorization detail +select insert_num,part,a,b from part_add_int_string_permute_select; + +-- SELECT permutation columns to make sure NULL defaulting works right +select insert_num,part,a,b from part_add_int_string_permute_select; +select insert_num,part,a,b,c from part_add_int_string_permute_select; +select insert_num,part,a,b,c,d from part_add_int_string_permute_select; +select insert_num,part,a,c,d from part_add_int_string_permute_select; +select insert_num,part,a,d from part_add_int_string_permute_select; +select insert_num,part,c from part_add_int_string_permute_select; +select insert_num,part,d from part_add_int_string_permute_select; + +drop table part_add_int_string_permute_select; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> DOUBLE +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> DOUBLE: (STRING, CHAR, VARCHAR) +-- +CREATE TABLE part_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING) PARTITIONED BY(part INT); + +insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table part_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING); + +insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double1, double1, double1, 'new' FROM schema_evolution_data WHERE insert_num = 111; + +explain vectorization detail +select insert_num,part,c1,c2,c3,b from part_change_string_group_double; + +select insert_num,part,c1,c2,c3,b from part_change_string_group_double; + +drop table part_change_string_group_double; + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for DATE_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for DATE_GROUP -> STRING_GROUP: DATE,TIMESTAMP, (STRING, CHAR, CHAR trunc, VARCHAR, VARCHAR trunc) +-- +CREATE TABLE part_change_date_group_string_group_date_timestamp(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING) PARTITIONED BY(part INT); + +insert into table part_change_date_group_string_group_date_timestamp partition(part=1) SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table part_change_date_group_string_group_date_timestamp replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING); + +insert into table part_change_date_group_string_group_date_timestamp partition(part=1) VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new'); + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp; + +drop table part_change_date_group_string_group_date_timestamp; + + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: +-- (TINYINT, SMALLINT, INT, BIGINT), STRING and +-- (TINYINT, SMALLINT, INT, BIGINT), CHAR and CHAR trunc and +-- (TINYINT, SMALLINT, INT, BIGINT), VARCHAR and VARCHAR trunc +-- +-- +CREATE TABLE part_change_numeric_group_string_group_multi_ints_string_group(insert_num int, + c1 tinyint, c2 smallint, c3 int, c4 bigint, + c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint, + c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint, + b STRING) PARTITIONED BY(part INT); + +insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) SELECT insert_num, + tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table part_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, c4 STRING, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5), + c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5), + b STRING) ; + +insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new'); + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group; + +drop table part_change_numeric_group_string_group_multi_ints_string_group; + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: +-- (DECIMAL, FLOAT, DOUBLE), STRING and +-- (DECIMAL, FLOAT, DOUBLE), CHAR and CHAR trunc and +-- (DECIMAL, FLOAT, DOUBLE), VARCHAR and VARCHAR trunc +-- +-- +CREATE TABLE part_change_numeric_group_string_group_floating_string_group(insert_num int, + c1 decimal(38,18), c2 float, c3 double, + c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double, + c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double, + b STRING) PARTITIONED BY(part INT); + +insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) SELECT insert_num, + decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table part_change_numeric_group_string_group_floating_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, + c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7), + c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7), + b STRING); + +insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new'); + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group; + +drop table part_change_numeric_group_string_group_floating_string_group; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> STRING_GROUP: STRING, (CHAR, CHAR trunc, VARCHAR, VARCHAR trunc) and +-- CHAR, (VARCHAR, VARCHAR trunc, STRING) and VARCHAR, (CHAR, CHAR trunc, STRING) +-- +CREATE TABLE part_change_string_group_string_group_string(insert_num int, + c1 string, c2 string, c3 string, c4 string, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), + c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING) PARTITIONED BY(part INT); + +insert into table part_change_string_group_string_group_string partition(part=1) SELECT insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,c4,b from part_change_string_group_string_group_string; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table part_change_string_group_string_group_string replace columns (insert_num int, + c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9), + c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING, + c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) ; + +insert into table part_change_string_group_string_group_string partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'new'); + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string; + +drop table part_change_string_group_string_group_string; + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP: +-- TINYINT, (SMALLINT, INT, BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- SMALLINT, (INT, BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- INT, (BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- BIGINT, (DECIMAL, FLOAT, DOUBLE) +-- +CREATE TABLE part_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int, + c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, + c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint, + c12 int, c13 int, c14 int, c15 int, + c16 bigint, c17 bigint, c18 bigint, + b STRING) PARTITIONED BY(part INT); + +insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table part_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) ; + +insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new'); + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +drop table part_change_lower_to_higher_numeric_group_tinyint_to_bigint; + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP: +-- DECIMAL, (FLOAT, DOUBLE) and +-- FLOAT, (DOUBLE) +-- +CREATE TABLE part_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING) PARTITIONED BY(part INT); + +insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) SELECT insert_num, + decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table part_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) ; + +insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new'); + +explain vectorization detail +select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float; + +select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float; + +drop table part_change_lower_to_higher_numeric_group_decimal_to_float; \ No newline at end of file diff --git ql/src/test/queries/clientpositive/schema_evol_text_nonvec_table_llap_io.q ql/src/test/queries/clientpositive/schema_evol_text_nonvec_table_llap_io.q new file mode 100644 index 0000000..5d5ea38 --- /dev/null +++ ql/src/test/queries/clientpositive/schema_evol_text_nonvec_table_llap_io.q @@ -0,0 +1,324 @@ +set hive.explain.user=false; +set hive.fetch.task.conversion=none; +set hive.cli.print.header=true; +SET hive.exec.schema.evolution=true; +SET hive.vectorized.use.vectorized.input.format=true; +SET hive.vectorized.use.vector.serde.deserialize=false; +SET hive.vectorized.use.row.serde.deserialize=false; +SET hive.vectorized.execution.enabled=false; +set hive.exec.dynamic.partition.mode=nonstrict; +set hive.metastore.disallow.incompatible.col.type.changes=true; +set hive.default.fileformat=textfile; +set hive.llap.io.enabled=true; +set hive.llap.io.encode.enabled=true; + +-- SORT_QUERY_RESULTS +-- +-- FILE VARIATION: TEXTFILE, Non-Vectorized, MapWork, Table +-- + +CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 
double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile; +load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data; + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE ADD COLUMNS +-- +-- +-- SUBSECTION: ALTER TABLE ADD COLUMNS: INT PERMUTE SELECT +-- +-- +CREATE TABLE table_add_int_permute_select(insert_num int, a INT, b STRING); + +insert into table table_add_int_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade ADD COLUMNS ... +alter table table_add_int_permute_select add columns(c int); + +insert into table table_add_int_permute_select VALUES (111, 80000, 'new', 80000); + +explain vectorization detail +select insert_num,a,b from table_add_int_permute_select; + +-- SELECT permutation columns to make sure NULL defaulting works right +select insert_num,a,b from table_add_int_permute_select; +select insert_num,a,b,c from table_add_int_permute_select; +select insert_num,c from table_add_int_permute_select; + +drop table table_add_int_permute_select; + + +-- SUBSECTION: ALTER TABLE ADD COLUMNS: INT, STRING, PERMUTE SELECT +-- +-- +CREATE TABLE table_add_int_string_permute_select(insert_num int, a INT, b STRING); + +insert into table table_add_int_string_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade ADD COLUMNS ... +alter table table_add_int_string_permute_select add columns(c int, d string); + +insert into table table_add_int_string_permute_select VALUES (111, 80000, 'new', 80000, 'filler'); + +explain vectorization detail +select insert_num,a,b from table_add_int_string_permute_select; + +-- SELECT permutation columns to make sure NULL defaulting works right +select insert_num,a,b from table_add_int_string_permute_select; +select insert_num,a,b,c from table_add_int_string_permute_select; +select insert_num,a,b,c,d from table_add_int_string_permute_select; +select insert_num,a,c,d from table_add_int_string_permute_select; +select insert_num,a,d from table_add_int_string_permute_select; +select insert_num,c from table_add_int_string_permute_select; +select insert_num,d from table_add_int_string_permute_select; + +drop table table_add_int_string_permute_select; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> DOUBLE +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> DOUBLE: (STRING, CHAR, VARCHAR) +-- +CREATE TABLE table_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING); + +insert into table table_change_string_group_double SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table table_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING); + +insert into table table_change_string_group_double VALUES (111, 789.321, 789.321, 789.321, 'new'); + +explain vectorization detail +select insert_num,c1,c2,c3,b from table_change_string_group_double; + +select insert_num,c1,c2,c3,b from table_change_string_group_double; + +drop table table_change_string_group_double; + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for DATE_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for DATE_GROUP -> STRING_GROUP: DATE,TIMESTAMP, (STRING, CHAR, CHAR trunc, VARCHAR, VARCHAR trunc) +-- +CREATE TABLE table_change_date_group_string_group_date_group(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING); + +insert into table table_change_date_group_string_group_date_group SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table table_change_date_group_string_group_date_group replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING); + +insert into table table_change_date_group_string_group_date_group VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new'); + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_date_group_string_group_date_group; + +drop table table_change_date_group_string_group_date_group; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: +-- (TINYINT, SMALLINT, INT, BIGINT), STRING and +-- (TINYINT, SMALLINT, INT, BIGINT), CHAR and CHAR trunc and +-- (TINYINT, SMALLINT, INT, BIGINT), VARCHAR and VARCHAR trunc +-- +-- +CREATE TABLE table_change_numeric_group_string_group_multi_ints_string_group(insert_num int, + c1 tinyint, c2 smallint, c3 int, c4 bigint, + c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint, + c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint, + b STRING); + +insert into table table_change_numeric_group_string_group_multi_ints_string_group SELECT insert_num, + tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + 'original' FROM schema_evolution_data; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table table_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, c4 STRING, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5), + c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5), + b STRING) ; + +insert into table table_change_numeric_group_string_group_multi_ints_string_group VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new'); + +explain vectorization detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group; + +drop table table_change_numeric_group_string_group_multi_ints_string_group; + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: +-- (DECIMAL, FLOAT, DOUBLE), STRING and +-- (DECIMAL, FLOAT, DOUBLE), CHAR and CHAR trunc and +-- (DECIMAL, FLOAT, DOUBLE), VARCHAR and VARCHAR trunc +-- +-- +CREATE TABLE table_change_numeric_group_string_group_floating_string_group(insert_num int, + c1 decimal(38,18), c2 float, c3 double, + c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double, + c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double, + b STRING); + +insert into table table_change_numeric_group_string_group_floating_string_group SELECT insert_num, + decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + 'original' FROM schema_evolution_data; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table table_change_numeric_group_string_group_floating_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, + c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7), + c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7), + b STRING); + +insert into table table_change_numeric_group_string_group_floating_string_group VALUES (111, + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new'); + +explain vectorization detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group; + +drop table table_change_numeric_group_string_group_floating_string_group; + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> STRING_GROUP: STRING, (CHAR, CHAR trunc, VARCHAR, VARCHAR trunc) and +-- CHAR, (VARCHAR, VARCHAR trunc, STRING) and VARCHAR, (CHAR, CHAR trunc, STRING) +-- +CREATE TABLE table_change_string_group_string_group_string(insert_num int, + c1 string, c2 string, c3 string, c4 string, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), + c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING); + +insert into table table_change_string_group_string_group_string SELECT insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data; + +select insert_num,c1,c2,c3,c4,b from table_change_string_group_string_group_string; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table table_change_string_group_string_group_string replace columns (insert_num int, + c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9), + c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING, + c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) ; + +insert into table table_change_string_group_string_group_string VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'new'); + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_string_group_string_group_string; + +drop table table_change_string_group_string_group_string; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP: +-- TINYINT, (SMALLINT, INT, BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- SMALLINT, (INT, BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- INT, (BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- BIGINT, (DECIMAL, FLOAT, DOUBLE) +-- +CREATE TABLE table_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int, + c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, + c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint, + c12 int, c13 int, c14 int, c15 int, + c16 bigint, c17 bigint, c18 bigint, + b STRING); + +insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM schema_evolution_data; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table table_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) ; + +insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new'); + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +drop table table_change_lower_to_higher_numeric_group_tinyint_to_bigint; + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP: +-- DECIMAL, (FLOAT, DOUBLE) and +-- FLOAT, (DOUBLE) +-- +CREATE TABLE table_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING); + +insert into table table_change_lower_to_higher_numeric_group_decimal_to_float SELECT insert_num, + decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data; + +select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table table_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) ;
+
+insert into table table_change_lower_to_higher_numeric_group_decimal_to_float VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new');
+
+select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float;
+
+drop table table_change_lower_to_higher_numeric_group_decimal_to_float;
diff --git ql/src/test/queries/clientpositive/schema_evol_text_vec_part_all_complex_llap_io.q ql/src/test/queries/clientpositive/schema_evol_text_vec_part_all_complex_llap_io.q
new file mode 100644
index 0000000..0549679
--- /dev/null
+++ ql/src/test/queries/clientpositive/schema_evol_text_vec_part_all_complex_llap_io.q
@@ -0,0 +1,165 @@
+set hive.explain.user=false;
+set hive.fetch.task.conversion=none;
+set hive.mapred.mode=nonstrict;
+set hive.cli.print.header=true;
+SET hive.exec.schema.evolution=true;
+SET hive.vectorized.use.vectorized.input.format=false;
+SET hive.vectorized.use.vector.serde.deserialize=true;
+SET hive.vectorized.use.row.serde.deserialize=false;
+SET hive.vectorized.execution.enabled=true;
+set hive.exec.dynamic.partition.mode=nonstrict;
+set hive.metastore.disallow.incompatible.col.type.changes=false;
+set hive.default.fileformat=textfile;
+set hive.llap.io.enabled=true;
+set hive.llap.io.encode.enabled=true;
+
+-- SORT_QUERY_RESULTS
+--
+-- FILE VARIATION: TEXTFILE, Vectorized, MapWork, Partitioned --> all complex conversions
+-- NOTE: hive.vectorized.use.vector.serde.deserialize above enables vectorized reading of
+-- TEXTFILE format files using the vector SERDE methods.
+--
+------------------------------------------------------------------------------------------
+--
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Various --> Various: STRUCT --> STRUCT
+--
+CREATE TABLE part_change_various_various_struct1(insert_num int, s1 STRUCT, b STRING) PARTITIONED BY(part INT);
+
+CREATE TABLE complex_struct1_a_txt(insert_num int, s1 STRUCT, b STRING)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile;
+load data local inpath '../../data/files/schema_evolution/complex_struct1_a.txt' overwrite into table complex_struct1_a_txt;
+
+insert into table part_change_various_various_struct1 partition(part=1) select * from complex_struct1_a_txt;
+
+select insert_num,part,s1,b from part_change_various_various_struct1;
+
+-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table part_change_various_various_struct1 replace columns (insert_num int, s1 STRUCT, b STRING); + +CREATE TABLE complex_struct1_b_txt(insert_num int, s1 STRUCT, b STRING) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct1_b.txt' overwrite into table complex_struct1_b_txt; + +insert into table part_change_various_various_struct1 partition(part=2) select * from complex_struct1_b_txt; + +CREATE TABLE complex_struct1_c_txt(insert_num int, s1 STRUCT, b STRING) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct1_c.txt' overwrite into table complex_struct1_c_txt; + +insert into table part_change_various_various_struct1 partition(part=1) select * from complex_struct1_c_txt; + +explain vectorization detail +select insert_num,part,s1,b from part_change_various_various_struct1; + +select insert_num,part,s1,b from part_change_various_various_struct1; + +drop table part_change_various_various_struct1; + + + +-- +-- SUBSECTION: ALTER TABLE ADD COLUMNS for Various --> Various: STRUCT +-- +CREATE TABLE part_add_various_various_struct2(insert_num int, b STRING) PARTITIONED BY(part INT); + +insert into table part_add_various_various_struct2 partition(part=1) + values(1, 'original'), + (2, 'original'); + +select insert_num,part,b from part_add_various_various_struct2; + +-- Table-Non-Cascade ADD COLUMN ... +alter table part_add_various_various_struct2 ADD columns (s2 STRUCT); + +CREATE TABLE complex_struct2_a_txt(insert_num int, b STRING, s2 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct2_a.txt' overwrite into table complex_struct2_a_txt; + +insert into table part_add_various_various_struct2 partition(part=1) select * from complex_struct2_a_txt; + +CREATE TABLE complex_struct2_b_txt(insert_num int, b STRING, s2 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct2_b.txt' overwrite into table complex_struct2_b_txt; + +insert into table part_add_various_various_struct2 partition(part=2) select * from complex_struct2_b_txt; + +select insert_num,part,b,s2 from part_add_various_various_struct2; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table part_add_various_various_struct2 REPLACE columns (insert_num int, b STRING, s2 STRUCT);
+
+CREATE TABLE complex_struct2_c_txt(insert_num int, b STRING, s2 STRUCT)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile;
+load data local inpath '../../data/files/schema_evolution/complex_struct2_c.txt' overwrite into table complex_struct2_c_txt;
+
+insert into table part_add_various_various_struct2 partition(part=2) select * from complex_struct2_c_txt;
+
+CREATE TABLE complex_struct2_d_txt(insert_num int, b STRING, s2 STRUCT)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile;
+load data local inpath '../../data/files/schema_evolution/complex_struct2_d.txt' overwrite into table complex_struct2_d_txt;
+
+insert into table part_add_various_various_struct2 partition(part=1) select * from complex_struct2_d_txt;
+
+explain vectorization detail
+select insert_num,part,b,s2 from part_add_various_various_struct2;
+
+select insert_num,part,b,s2 from part_add_various_various_struct2;
+
+drop table part_add_various_various_struct2;
+
+
+
+
+--
+-- SUBSECTION: ALTER TABLE ADD COLUMNS for Various --> Various: ADD COLUMNS to STRUCT type as LAST column of 3 columns
+--
+CREATE TABLE part_add_to_various_various_struct4(insert_num int, b STRING, s3 STRUCT) PARTITIONED BY(part INT);
+
+CREATE TABLE complex_struct4_a_txt(insert_num int, b STRING, s3 STRUCT)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile;
+load data local inpath '../../data/files/schema_evolution/complex_struct4_a.txt' overwrite into table complex_struct4_a_txt;
+
+insert into table part_add_to_various_various_struct4 partition(part=1) select * from complex_struct4_a_txt;
+
+select insert_num,part,b,s3 from part_add_to_various_various_struct4;
+
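For orientation, the three delimiters declared on the _txt staging tables above nest as field, collection-item, and map-key separators, which is how a flat text line populates a struct column. A hypothetical illustration (table and field names are not from this patch):

    -- With this layout, a data line such as:
    --   5|original|one,two
    -- loads as insert_num=5, b='original', s=struct('one','two').
    CREATE TABLE struct_txt_demo (insert_num int, b string, s struct<f1:string,f2:string>)
    row format delimited fields terminated by '|'
    collection items terminated by ','
    map keys terminated by ':' stored as textfile;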
+-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table part_add_to_various_various_struct4 replace columns (insert_num int, b STRING, s3 STRUCT);
+
+CREATE TABLE complex_struct4_b_txt(insert_num int, b STRING, s3 STRUCT)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile;
+load data local inpath '../../data/files/schema_evolution/complex_struct4_b.txt' overwrite into table complex_struct4_b_txt;
+
+insert into table part_add_to_various_various_struct4 partition(part=2) select * from complex_struct4_b_txt;
+
+CREATE TABLE complex_struct4_c_txt(insert_num int, b STRING, s3 STRUCT)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile;
+load data local inpath '../../data/files/schema_evolution/complex_struct4_c.txt' overwrite into table complex_struct4_c_txt;
+
+insert into table part_add_to_various_various_struct4 partition(part=1) select * from complex_struct4_c_txt;
+
+explain vectorization detail
+select insert_num,part,b,s3 from part_add_to_various_various_struct4;
+
+select insert_num,part,b,s3 from part_add_to_various_various_struct4;
+
+drop table part_add_to_various_various_struct4;
diff --git ql/src/test/queries/clientpositive/schema_evol_text_vec_part_all_primitive_llap_io.q ql/src/test/queries/clientpositive/schema_evol_text_vec_part_all_primitive_llap_io.q
new file mode 100644
index 0000000..577a9e6
--- /dev/null
+++ ql/src/test/queries/clientpositive/schema_evol_text_vec_part_all_primitive_llap_io.q
@@ -0,0 +1,208 @@
+set hive.explain.user=false;
+set hive.fetch.task.conversion=none;
+set hive.mapred.mode=nonstrict;
+set hive.cli.print.header=true;
+SET hive.exec.schema.evolution=true;
+SET hive.vectorized.use.vectorized.input.format=false;
+SET hive.vectorized.use.vector.serde.deserialize=true;
+SET hive.vectorized.use.row.serde.deserialize=false;
+SET hive.vectorized.execution.enabled=true;
+set hive.exec.dynamic.partition.mode=nonstrict;
+set hive.metastore.disallow.incompatible.col.type.changes=false;
+set hive.default.fileformat=textfile;
+set hive.llap.io.enabled=true;set hive.llap.io.encode.enabled=true;
+
+-- SORT_QUERY_RESULTS
+--
+-- FILE VARIATION: TEXTFILE, Non-Vectorized, MapWork, Partitioned --> all primitive conversions
+-- NOTE: the use of hive.vectorized.use.vector.serde.deserialize above which enables doing
+-- vectorized reading of TEXTFILE format files using the vector SERDE methods.
+--
+------------------------------------------------------------------------------------------
+-- SECTION: ALTER TABLE CHANGE COLUMNS Various --> Various
+--
+--
+
+CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string)
+row format delimited fields terminated by '|' stored as textfile;
+load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data;
+
+CREATE TABLE schema_evolution_data_2(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string)
+row format delimited fields terminated by '|' stored as textfile;
+load data local inpath '../../data/files/schema_evolution/schema_evolution_data_2.txt' overwrite into table schema_evolution_data_2;
+
+--
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Various --> Various:
+--   (BYTE, SHORT, INT, LONG, FLOAT, DOUBLE, DECIMAL, STRING, TIMESTAMP) --> BOOLEAN and
+--   (BOOLEAN, SHORT, INT, LONG, FLOAT, DOUBLE, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> BYTE (minimum value of -128 and a maximum value of 127) and
+--   (BOOLEAN, TINYINT, INT, LONG, FLOAT, DOUBLE, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> SMALLINT (minimum value of -32768 and a maximum value of 32767) and
+--   (BOOLEAN, TINYINT, SMALLINT, LONG, FLOAT, DOUBLE, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> INT (minimum value of -2147483648 and a maximum value of 2147483647) and
+--   (BOOLEAN, TINYINT, SMALLINT, INT, FLOAT, DOUBLE, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> BIGINT (minimum value of -9223372036854775808 and a maximum value of 9223372036854775807)
+--
+CREATE TABLE part_change_various_various_boolean_to_bigint(insert_num int,
+    c1 TINYINT, c2 SMALLINT, c3 INT, c4 BIGINT, c5 FLOAT, c6 DOUBLE, c7 DECIMAL(38,18), c8 STRING, c9 TIMESTAMP,
+    c10 BOOLEAN, c11 SMALLINT, c12 INT, c13 BIGINT, c14 FLOAT, c15 DOUBLE, c16 DECIMAL(38,18), c17 STRING, c18 CHAR(25), c19 VARCHAR(25), c20 TIMESTAMP,
+    c21 BOOLEAN, c22 TINYINT, c23 INT, c24 BIGINT, c25 FLOAT, c26 DOUBLE, c27 DECIMAL(38,18), c28 STRING, c29 CHAR(25), c30 VARCHAR(25), c31 TIMESTAMP,
+    c32 BOOLEAN, c33 TINYINT, c34 SMALLINT, c35 BIGINT, c36 FLOAT, c37 DOUBLE, c38 DECIMAL(38,18), c39 STRING, c40 CHAR(25), c41 VARCHAR(25), c42 TIMESTAMP,
+    c43 BOOLEAN, c44 TINYINT, c45 SMALLINT, c46 INT, c47 FLOAT, c48 DOUBLE, c49 DECIMAL(38,18), c50 STRING, c51 CHAR(25), c52 VARCHAR(25), c53 TIMESTAMP,
+    b STRING) PARTITIONED BY(part INT);
+
+insert into table part_change_various_various_boolean_to_bigint partition(part=1) SELECT insert_num,
+    tinyint1, smallint1, int1, bigint1, float1, double1, decimal1, boolean_str, timestamp1,
+    boolean1, smallint1, int1, bigint1, float1, double1, decimal1, tinyint_str, tinyint_str, tinyint_str, timestamp1,
+    boolean1, tinyint1, int1, bigint1, float1, double1, decimal1, smallint_str, smallint_str, smallint_str, timestamp1,
+    boolean1, tinyint1, smallint1, bigint1, float1, double1, decimal1, int_str, int_str, int_str, timestamp1,
+    boolean1, tinyint1, smallint1, int1, float1, double1, decimal1, bigint_str, bigint_str, bigint_str, timestamp1,
+    'original' FROM schema_evolution_data;
+
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint;
+
+-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table part_change_various_various_boolean_to_bigint replace columns (insert_num int,
+    c1 BOOLEAN, c2 BOOLEAN, c3 BOOLEAN, c4 BOOLEAN, c5 BOOLEAN, c6 BOOLEAN, c7 BOOLEAN, c8 BOOLEAN, c9 BOOLEAN,
+    c10 TINYINT, c11 TINYINT, c12 TINYINT, c13 TINYINT, c14 TINYINT, c15 TINYINT, c16 TINYINT, c17 TINYINT, c18 TINYINT, c19 TINYINT, c20 TINYINT,
+    c21 SMALLINT, c22 SMALLINT, c23 SMALLINT, c24 SMALLINT, c25 SMALLINT, c26 SMALLINT, c27 SMALLINT, c28 SMALLINT, c29 SMALLINT, c30 SMALLINT, c31 SMALLINT,
+    c32 INT, c33 INT, c34 INT, c35 INT, c36 INT, c37 INT, c38 INT, c39 INT, c40 INT, c41 INT, c42 INT,
+    c43 BIGINT, c44 BIGINT, c45 BIGINT, c46 BIGINT, c47 BIGINT, c48 BIGINT, c49 BIGINT, c50 BIGINT, c51 BIGINT, c52 BIGINT, c53 BIGINT,
+    b STRING);
+
+insert into table part_change_various_various_boolean_to_bigint partition(part=1) SELECT insert_num,
+    boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1,
+    tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1,
+    smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1,
+    int1, int1, int1, int1, int1, int1, int1, int1, int1, int1, int1,
+    bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1,
+    'new' FROM schema_evolution_data;
+
+explain vectorization detail
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint;
+
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint;
+
+drop table part_change_various_various_boolean_to_bigint;
+
+
+
+--
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Various --> Various:
+--   (BOOLEAN, TINYINT, SMALLINT, INT, LONG, FLOAT, DOUBLE, STRING, CHAR, VARCHAR, TIMESTAMP) --> DECIMAL
+--   (BOOLEAN, TINYINT, SMALLINT, INT, LONG, DOUBLE, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> FLOAT and
+--   (BOOLEAN, TINYINT, SMALLINT, INT, LONG, FLOAT, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> DOUBLE and
+--
+CREATE TABLE part_change_various_various_decimal_to_double(insert_num int,
+    c1 BOOLEAN, c2 TINYINT, c3 SMALLINT, c4 INT, c5 BIGINT, c6 FLOAT, c7 DOUBLE, c8 STRING, c9 CHAR(25), c10 VARCHAR(25), c11 TIMESTAMP,
+    c12 BOOLEAN, c13 TINYINT, c14 SMALLINT, c15 INT, c16 BIGINT, c17 DECIMAL(38,18), c18 DOUBLE, c19 STRING, c20 CHAR(25), c21 VARCHAR(25), c22 TIMESTAMP,
+    c23 BOOLEAN, c24 TINYINT, c25 SMALLINT, c26 INT, c27 BIGINT, c28 DECIMAL(38,18), c29 FLOAT, c30 STRING, c31 CHAR(25), c32 VARCHAR(25), c33 TIMESTAMP,
+    b STRING) PARTITIONED BY(part INT);
+
+insert into table part_change_various_various_decimal_to_double partition(part=1) SELECT insert_num,
+    boolean1, tinyint1, smallint1, int1, bigint1, float1, double1, decimal_str, decimal_str, decimal_str, timestamp1,
+    boolean1, tinyint1, smallint1, int1, bigint1, decimal1, double1, float_str, float_str, float_str, timestamp1,
+    boolean1, tinyint1, smallint1, int1, bigint1, decimal1, float1, double_str, double_str, double_str, timestamp1,
+    'original' FROM schema_evolution_data;
+
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double;
+
+-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table part_change_various_various_decimal_to_double replace columns (insert_num int,
+    c1 DECIMAL(38,18), c2 DECIMAL(38,18), c3 DECIMAL(38,18), c4 DECIMAL(38,18), c5 DECIMAL(38,18), c6 DECIMAL(38,18), c7 DECIMAL(38,18), c8 DECIMAL(38,18), c9 DECIMAL(38,18), c10 DECIMAL(38,18), c11 DECIMAL(38,18),
+    c12 FLOAT, c13 FLOAT, c14 FLOAT, c15 FLOAT, c16 FLOAT, c17 FLOAT, c18 FLOAT, c19 FLOAT, c20 FLOAT, c21 FLOAT, c22 FLOAT,
+    c23 DOUBLE, c24 DOUBLE, c25 DOUBLE, c26 DOUBLE, c27 DOUBLE, c28 DOUBLE, c29 DOUBLE, c30 DOUBLE, c31 DOUBLE, c32 DOUBLE, c33 DOUBLE,
+    b STRING);
+
+insert into table part_change_various_various_decimal_to_double partition(part=1) SELECT insert_num,
+    decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1,
+    float1, float1, float1, float1, float1, float1, float1, float1, float1, float1, float1,
+    double1, double1, double1, double1, double1, double1, double1, double1, double1, double1, double1,
+    'new' FROM schema_evolution_data_2 WHERE insert_num=111;
+
+explain vectorization detail
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double;
+
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double;
+
+drop table part_change_various_various_decimal_to_double;
+
+
+
+
+--
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Various --> Various: (BOOLEAN, TINYINT, SMALLINT, INT, LONG, FLOAT, DOUBLE, DECIMAL(38,18), STRING, CHAR, VARCHAR, DATE) --> TIMESTAMP
+--
+CREATE TABLE part_change_various_various_timestamp(insert_num int, c1 BOOLEAN, c2 TINYINT, c3 SMALLINT, c4 INT, c5 BIGINT, c6 FLOAT, c7 DOUBLE, c8 DECIMAL(38,18), c9 STRING, c10 CHAR(25), c11 VARCHAR(25), c12 DATE, b STRING) PARTITIONED BY(part INT);
+
+insert into table part_change_various_various_timestamp partition(part=1) SELECT insert_num, boolean1, tinyint1, smallint1, int1, bigint1, float1, double1, decimal1, timestamp_str, timestamp_str, timestamp_str, date1, 'original' FROM schema_evolution_data;
+
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp;
+
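The conversions behind this subsection are ordinary Hive casts applied at read time; a couple of standalone examples of the same string/date-to-timestamp coercions (values illustrative only):

    select cast('2007-02-14 20:21:38' as timestamp);       -- string parsed as a timestamp
    select cast(cast('2007-02-14' as date) as timestamp);  -- date widened to a midnight timestamp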
+-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table part_change_various_various_timestamp replace columns (insert_num int, c1 TIMESTAMP, c2 TIMESTAMP, c3 TIMESTAMP, c4 TIMESTAMP, c5 TIMESTAMP, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, c11 TIMESTAMP, c12 TIMESTAMP, b STRING);
+
+insert into table part_change_various_various_timestamp partition(part=1) SELECT insert_num, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'new' FROM schema_evolution_data_2 WHERE insert_num=111;
+
+explain vectorization detail
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp;
+
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp;
+
+drop table part_change_various_various_timestamp;
+
+--
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Various --> Various: (STRING, CHAR, VARCHAR, TIMESTAMP) --> DATE
+--
+CREATE TABLE part_change_various_various_date(insert_num int, c1 STRING, c2 CHAR(25), c3 VARCHAR(25), c4 TIMESTAMP, b STRING) PARTITIONED BY(part INT);
+
+insert into table part_change_various_various_date partition(part=1) SELECT insert_num, date_str, date_str, date_str, timestamp1, 'original' FROM schema_evolution_data;
+
+select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date;
+
+-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table part_change_various_various_date replace columns (insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, b STRING);
+
+insert into table part_change_various_various_date partition(part=1) SELECT insert_num, date1, date1, date1, date1, 'new' FROM schema_evolution_data_2 WHERE insert_num=111;
+
+explain vectorization detail
+select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date;
+
+select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date;
+
+drop table part_change_various_various_date;
+
+
+--
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Same Type (CHAR, VARCHAR, DECIMAL) --> Different maxLength or precision/scale
+--
+CREATE TABLE part_change_same_type_different_params(insert_num int, c1 CHAR(12), c2 CHAR(25), c3 VARCHAR(25), c4 VARCHAR(10), c5 DECIMAL(12,4), c6 DECIMAL(20,10), b STRING) PARTITIONED BY(part INT);
+
+CREATE TABLE same_type1_a_txt(insert_num int, c1 CHAR(12), c2 CHAR(25), c3 VARCHAR(25), c4 VARCHAR(10), c5 DECIMAL(12,4), c6 DECIMAL(20,10), b STRING)
+row format delimited fields terminated by '|'
+stored as textfile;
+load data local inpath '../../data/files/schema_evolution/same_type1_a.txt' overwrite into table same_type1_a_txt;
+
+insert into table part_change_same_type_different_params partition(part=1) select * from same_type1_a_txt;
+
+select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params;
+
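Changing only the length or precision parameters of a type, as the next ALTER does, makes over-long values truncate (or round, for DECIMAL scale) on read. A quick hypothetical check of the underlying cast behavior:

    select cast('0123456789' as varchar(5));  -- '01234'
    select cast(1234.5678 as decimal(10,2));  -- 1234.57 (scale reduced by rounding)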
+-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table part_change_same_type_different_params replace columns (insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING);
+
+CREATE TABLE same_type1_b_txt(insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING)
+row format delimited fields terminated by '|'
+stored as textfile;
+load data local inpath '../../data/files/schema_evolution/same_type1_b.txt' overwrite into table same_type1_b_txt;
+
+insert into table part_change_same_type_different_params partition(part=1) select * from same_type1_b_txt;
+
+CREATE TABLE same_type1_c_txt(insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING)
+row format delimited fields terminated by '|'
+stored as textfile;
+load data local inpath '../../data/files/schema_evolution/same_type1_c.txt' overwrite into table same_type1_c_txt;
+
+insert into table part_change_same_type_different_params partition(part=2) select * from same_type1_c_txt;
+
+explain vectorization detail
+select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params;
+
+select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params;
+
+drop table part_change_same_type_different_params;
diff --git ql/src/test/queries/clientpositive/schema_evol_text_vec_part_llap_io.q ql/src/test/queries/clientpositive/schema_evol_text_vec_part_llap_io.q
new file mode 100644
index 0000000..fe2d261
--- /dev/null
+++ ql/src/test/queries/clientpositive/schema_evol_text_vec_part_llap_io.q
@@ -0,0 +1,339 @@
+set hive.explain.user=false;
+set hive.fetch.task.conversion=none;
+set hive.mapred.mode=nonstrict;
+set hive.cli.print.header=true;
+SET hive.exec.schema.evolution=true;
+SET hive.vectorized.use.vectorized.input.format=false;
+SET hive.vectorized.use.vector.serde.deserialize=true;
+SET hive.vectorized.use.row.serde.deserialize=false;
+SET hive.vectorized.execution.enabled=true;
+set hive.exec.dynamic.partition.mode=nonstrict;
+set hive.metastore.disallow.incompatible.col.type.changes=true;
+set hive.default.fileformat=textfile;
+set hive.llap.io.enabled=true;set hive.llap.io.encode.enabled=true;
+
+-- SORT_QUERY_RESULTS
+--
+-- FILE VARIATION: TEXTFILE, Non-Vectorized, MapWork, Partitioned
+-- NOTE: the use of hive.vectorized.use.vector.serde.deserialize above which enables doing
+-- vectorized reading of TEXTFILE format files using the vector SERDE methods.
+--
+
+CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string)
+row format delimited fields terminated by '|' stored as textfile;
+load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data;
+
+------------------------------------------------------------------------------------------
+-- SECTION: ALTER TABLE ADD COLUMNS
+--
+--
+-- SUBSECTION: ALTER TABLE ADD COLUMNS: INT PERMUTE SELECT
+--
+--
+CREATE TABLE part_add_int_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT);
+
+insert into table part_add_int_permute_select partition(part=1) VALUES (1, 1111, 'new');
+
+-- Table-Non-Cascade ADD COLUMNS ...
+alter table part_add_int_permute_select add columns(c int);
+
+insert into table part_add_int_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333);
+
+explain vectorization detail
+select insert_num,part,a,b from part_add_int_permute_select;
+
+-- SELECT permutation columns to make sure NULL defaulting works right
+select insert_num,part,a,b from part_add_int_permute_select;
+select insert_num,part,a,b,c from part_add_int_permute_select;
+select insert_num,part,c from part_add_int_permute_select;
+
+drop table part_add_int_permute_select;
+
+
+-- SUBSECTION: ALTER TABLE ADD COLUMNS: INT, STRING, PERMUTE SELECT
+--
+--
+CREATE TABLE part_add_int_string_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT);
+
+insert into table part_add_int_string_permute_select partition(part=1) VALUES (1, 1111, 'new');
+
+-- Table-Non-Cascade ADD COLUMNS ...
+alter table part_add_int_string_permute_select add columns(c int, d string);
+
+insert into table part_add_int_string_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333, '4444');
+
+explain vectorization detail
+select insert_num,part,a,b from part_add_int_string_permute_select;
+
+-- SELECT permutation columns to make sure NULL defaulting works right
+select insert_num,part,a,b from part_add_int_string_permute_select;
+select insert_num,part,a,b,c from part_add_int_string_permute_select;
+select insert_num,part,a,b,c,d from part_add_int_string_permute_select;
+select insert_num,part,a,c,d from part_add_int_string_permute_select;
+select insert_num,part,a,d from part_add_int_string_permute_select;
+select insert_num,part,c from part_add_int_string_permute_select;
+select insert_num,part,d from part_add_int_string_permute_select;
+
+drop table part_add_int_string_permute_select;
+
+
+
+------------------------------------------------------------------------------------------
+-- SECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> DOUBLE
+--
+--
+--
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> DOUBLE: (STRING, CHAR, VARCHAR)
+--
+CREATE TABLE part_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING) PARTITIONED BY(part INT);
+
+insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data;
+
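The permute-SELECT tests above verify NULL defaulting for columns added after data was written. A compact sketch of the rule, with a hypothetical table that is not part of this patch:

    CREATE TABLE add_col_demo (insert_num int, a int) stored as textfile;
    insert into add_col_demo values (1, 10);
    alter table add_col_demo add columns (c int);
    -- Rows written before the ADD COLUMNS have no bytes for c, so it reads as NULL:
    select insert_num, a, c from add_col_demo;   -- expect: 1  10  NULL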
+-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table part_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING);
+
+insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double1, double1, double1, 'new' FROM schema_evolution_data WHERE insert_num = 111;
+
+explain vectorization detail
+select insert_num,part,c1,c2,c3,b from part_change_string_group_double;
+
+select insert_num,part,c1,c2,c3,b from part_change_string_group_double;
+
+drop table part_change_string_group_double;
+
+------------------------------------------------------------------------------------------
+-- SECTION: ALTER TABLE CHANGE COLUMNS for DATE_GROUP -> STRING_GROUP
+--
+--
+--
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for DATE_GROUP -> STRING_GROUP: DATE,TIMESTAMP, (STRING, CHAR, CHAR trunc, VARCHAR, VARCHAR trunc)
+--
+CREATE TABLE part_change_date_group_string_group_date_timestamp(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING) PARTITIONED BY(part INT);
+
+insert into table part_change_date_group_string_group_date_timestamp partition(part=1) SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data;
+
+-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table part_change_date_group_string_group_date_timestamp replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING);
+
+insert into table part_change_date_group_string_group_date_timestamp partition(part=1) VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new');
+
+explain vectorization detail
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp;
+
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp;
+
+drop table part_change_date_group_string_group_date_timestamp;
+
+
+
+
+------------------------------------------------------------------------------------------
+-- SECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP
+--
+--
+--
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP:
+--   (TINYINT, SMALLINT, INT, BIGINT), STRING and
+--   (TINYINT, SMALLINT, INT, BIGINT), CHAR and CHAR trunc and
+--   (TINYINT, SMALLINT, INT, BIGINT), VARCHAR and VARCHAR trunc
+--
+--
+CREATE TABLE part_change_numeric_group_string_group_multi_ints_string_group(insert_num int,
+    c1 tinyint, c2 smallint, c3 int, c4 bigint,
+    c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint,
+    c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint,
+    b STRING) PARTITIONED BY(part INT);
+
+insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) SELECT insert_num,
+    tinyint1, smallint1, int1, bigint1,
+    tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1,
+    tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1,
+    'original' FROM schema_evolution_data;
+
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group;
+
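The DATE/TIMESTAMP-to-string conversions above rely on Hive's canonical text rendering, with the CHAR/VARCHAR "trunc" columns clipping that rendering. For reference, the same casts in isolation (values illustrative only):

    select cast(cast('2007-02-14' as date) as string);   -- '2007-02-14'
    select cast(cast('2007-02-14' as date) as char(5));  -- rendering truncated to '2007-'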
+-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table part_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int,
+    c1 STRING, c2 STRING, c3 STRING, c4 STRING,
+    c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5),
+    c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5),
+    b STRING) ;
+
+insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) VALUES (111,
+    'filler', 'filler', 'filler', 'filler',
+    'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler',
+    'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler',
+    'new');
+
+explain vectorization detail
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group;
+
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group;
+
+drop table part_change_numeric_group_string_group_multi_ints_string_group;
+
+
+
+--
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP:
+--   (DECIMAL, FLOAT, DOUBLE), STRING and
+--   (DECIMAL, FLOAT, DOUBLE), CHAR and CHAR trunc and
+--   (DECIMAL, FLOAT, DOUBLE), VARCHAR and VARCHAR trunc
+--
+--
+CREATE TABLE part_change_numeric_group_string_group_floating_string_group(insert_num int,
+    c1 decimal(38,18), c2 float, c3 double,
+    c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double,
+    c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double,
+    b STRING) PARTITIONED BY(part INT);
+
+insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) SELECT insert_num,
+    decimal1, float1, double1,
+    decimal1, float1, double1, decimal1, float1, double1,
+    decimal1, float1, double1, decimal1, float1, double1,
+    'original' FROM schema_evolution_data;
+
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group;
+
+-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table part_change_numeric_group_string_group_floating_string_group replace columns (insert_num int,
+    c1 STRING, c2 STRING, c3 STRING,
+    c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7),
+    c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7),
+    b STRING);
+
+insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) VALUES (111,
+    'filler', 'filler', 'filler',
+    'filler', 'filler', 'filler', 'filler', 'filler', 'filler',
+    'filler', 'filler', 'filler', 'filler', 'filler', 'filler',
+    'new');
+
+explain vectorization detail
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group;
+
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group;
+
+drop table part_change_numeric_group_string_group_floating_string_group;
+
+
+
+------------------------------------------------------------------------------------------
+-- SECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> STRING_GROUP
+--
+--
+--
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> STRING_GROUP: STRING, (CHAR, CHAR trunc, VARCHAR, VARCHAR trunc) and
+--   CHAR, (VARCHAR, VARCHAR trunc, STRING) and VARCHAR, (CHAR, CHAR trunc, STRING)
+--
+CREATE TABLE part_change_string_group_string_group_string(insert_num int,
+    c1 string, c2 string, c3 string, c4 string,
+    c5 CHAR(50), c6 CHAR(50), c7 CHAR(50),
+    c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING) PARTITIONED BY(part INT);
+
+insert into table part_change_string_group_string_group_string partition(part=1) SELECT insert_num,
+    string2, string2, string2, string2,
+    string2, string2, string2,
+    string2, string2, string2,
+    'original' FROM schema_evolution_data;
+
+select insert_num,part,c1,c2,c3,c4,b from part_change_string_group_string_group_string;
+
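The CHAR(7)/VARCHAR(7) "trunc" columns in the preceding subsection check that long numeric renderings are clipped to the declared length; the same effect in isolation (illustrative):

    select cast('1234.5678' as char(7));     -- '1234.56'
    select cast('1234.5678' as varchar(7));  -- '1234.56'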
+-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table part_change_string_group_string_group_string replace columns (insert_num int,
+    c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9),
+    c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING,
+    c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) ;
+
+insert into table part_change_string_group_string_group_string partition(part=1) VALUES (111,
+    'filler', 'filler', 'filler', 'filler',
+    'filler', 'filler', 'filler',
+    'filler', 'filler', 'filler',
+    'new');
+
+explain vectorization detail
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string;
+
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string;
+
+drop table part_change_string_group_string_group_string;
+
+
+------------------------------------------------------------------------------------------
+-- SECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP
+--
+--
+--
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP:
+--   TINYINT, (SMALLINT, INT, BIGINT, DECIMAL, FLOAT, DOUBLE) and
+--   SMALLINT, (INT, BIGINT, DECIMAL, FLOAT, DOUBLE) and
+--   INT, (BIGINT, DECIMAL, FLOAT, DOUBLE) and
+--   BIGINT, (DECIMAL, FLOAT, DOUBLE)
+--
+CREATE TABLE part_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int,
+    c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint,
+    c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint,
+    c12 int, c13 int, c14 int, c15 int,
+    c16 bigint, c17 bigint, c18 bigint,
+    b STRING) PARTITIONED BY(part INT);
+
+insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) SELECT insert_num,
+    tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1,
+    smallint1, smallint1, smallint1, smallint1, smallint1,
+    int1, int1, int1, int1,
+    bigint1, bigint1, bigint1,
+    'original' FROM schema_evolution_data;
+
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint;
+
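"Lower" to "higher" here means widening within the numeric group, the same promotions plain casts perform, so no overflow is possible in this direction. In isolation (illustrative values):

    select cast(cast(7000 as smallint) as int),
           cast(90000000 as bigint),
           cast(1234.5678 as double);   -- widening promotions only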
+-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table part_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int,
+    c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE,
+    c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE,
+    c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE,
+    c16 decimal(38,18), c17 FLOAT, c18 DOUBLE,
+    b STRING) ;
+
+insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) VALUES (111,
+    7000, 80000, 90000000, 1234.5678, 9876.543, 789.321,
+    80000, 90000000, 1234.5678, 9876.543, 789.321,
+    90000000, 1234.5678, 9876.543, 789.321,
+    1234.5678, 9876.543, 789.321,
+    'new');
+
+explain vectorization detail
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint;
+
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint;
+
+drop table part_change_lower_to_higher_numeric_group_tinyint_to_bigint;
+
+
+
+--
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP:
+--   DECIMAL, (FLOAT, DOUBLE) and
+--   FLOAT, (DOUBLE)
+--
+CREATE TABLE part_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int,
+    c1 decimal(38,18), c2 decimal(38,18),
+    c3 float,
+    b STRING) PARTITIONED BY(part INT);
+
+insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) SELECT insert_num,
+    decimal1, decimal1,
+    float1,
+    'original' FROM schema_evolution_data;
+
+select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float;
+
+-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table part_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) ;
+
+insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new');
+
+explain vectorization detail
+select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float;
+
+select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float;
+
+drop table part_change_lower_to_higher_numeric_group_decimal_to_float;
\ No newline at end of file
diff --git ql/src/test/queries/clientpositive/schema_evol_text_vec_table_llap_io.q ql/src/test/queries/clientpositive/schema_evol_text_vec_table_llap_io.q
new file mode 100644
index 0000000..8197821
--- /dev/null
+++ ql/src/test/queries/clientpositive/schema_evol_text_vec_table_llap_io.q
@@ -0,0 +1,325 @@
+set hive.explain.user=false;
+set hive.fetch.task.conversion=none;
+set hive.cli.print.header=true;
+SET hive.exec.schema.evolution=true;
+SET hive.vectorized.use.vectorized.input.format=false;
+SET hive.vectorized.use.vector.serde.deserialize=true;
+SET hive.vectorized.use.row.serde.deserialize=false;
+SET hive.vectorized.execution.enabled=true;
+set hive.exec.dynamic.partition.mode=nonstrict;
+set hive.metastore.disallow.incompatible.col.type.changes=true;
+set hive.default.fileformat=textfile;
+set hive.llap.io.enabled=true;set hive.llap.io.encode.enabled=true;
+
+-- SORT_QUERY_RESULTS
+--
+-- FILE VARIATION: TEXTFILE, Non-Vectorized, MapWork, Table
+-- NOTE: the use of hive.vectorized.use.vector.serde.deserialize above which enables doing
+-- vectorized reading of TEXTFILE format files using the vector SERDE methods.
+--
+
+CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string)
+row format delimited fields terminated by '|' stored as textfile;
+load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data;
+
+------------------------------------------------------------------------------------------
+-- SECTION: ALTER TABLE ADD COLUMNS
+--
+--
+-- SUBSECTION: ALTER TABLE ADD COLUMNS: INT PERMUTE SELECT
+--
+--
+CREATE TABLE table_add_int_permute_select(insert_num int, a INT, b STRING);
+
+insert into table table_add_int_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data;
+
+-- Table-Non-Cascade ADD COLUMNS ...
+alter table table_add_int_permute_select add columns(c int);
+
+insert into table table_add_int_permute_select VALUES (111, 80000, 'new', 80000);
+
+explain vectorization detail
+select insert_num,a,b from table_add_int_permute_select;
+
+-- SELECT permutation columns to make sure NULL defaulting works right
+select insert_num,a,b from table_add_int_permute_select;
+select insert_num,a,b,c from table_add_int_permute_select;
+select insert_num,c from table_add_int_permute_select;
+
+drop table table_add_int_permute_select;
+
+
+-- SUBSECTION: ALTER TABLE ADD COLUMNS: INT, STRING, PERMUTE SELECT
+--
+--
+CREATE TABLE table_add_int_string_permute_select(insert_num int, a INT, b STRING);
+
+insert into table table_add_int_string_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data;
+
+-- Table-Non-Cascade ADD COLUMNS ...
+alter table table_add_int_string_permute_select add columns(c int, d string);
+
+insert into table table_add_int_string_permute_select VALUES (111, 80000, 'new', 80000, 'filler');
+
+explain vectorization detail
+select insert_num,a,b from table_add_int_string_permute_select;
+
+-- SELECT permutation columns to make sure NULL defaulting works right
+select insert_num,a,b from table_add_int_string_permute_select;
+select insert_num,a,b,c from table_add_int_string_permute_select;
+select insert_num,a,b,c,d from table_add_int_string_permute_select;
+select insert_num,a,c,d from table_add_int_string_permute_select;
+select insert_num,a,d from table_add_int_string_permute_select;
+select insert_num,c from table_add_int_string_permute_select;
+select insert_num,d from table_add_int_string_permute_select;
+
+drop table table_add_int_string_permute_select;
+
+
+
+------------------------------------------------------------------------------------------
+-- SECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> DOUBLE
+--
+--
+--
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> DOUBLE: (STRING, CHAR, VARCHAR)
+--
+CREATE TABLE table_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING);
+
+insert into table table_change_string_group_double SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data;
+
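The STRING-group-to-DOUBLE change that follows reads the original text columns back as doubles; text that does not parse as a number becomes NULL rather than raising an error. In isolation (illustrative):

    select cast('789.321' as double);       -- 789.321
    select cast('not a number' as double);  -- NULL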
+-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table table_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING);
+
+insert into table table_change_string_group_double VALUES (111, 789.321, 789.321, 789.321, 'new');
+
+explain vectorization detail
+select insert_num,c1,c2,c3,b from table_change_string_group_double;
+
+select insert_num,c1,c2,c3,b from table_change_string_group_double;
+
+drop table table_change_string_group_double;
+
+------------------------------------------------------------------------------------------
+-- SECTION: ALTER TABLE CHANGE COLUMNS for DATE_GROUP -> STRING_GROUP
+--
+--
+--
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for DATE_GROUP -> STRING_GROUP: DATE,TIMESTAMP, (STRING, CHAR, CHAR trunc, VARCHAR, VARCHAR trunc)
+--
+CREATE TABLE table_change_date_group_string_group_date_group(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING);
+
+insert into table table_change_date_group_string_group_date_group SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data;
+
+-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table table_change_date_group_string_group_date_group replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING);
+
+insert into table table_change_date_group_string_group_date_group VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new');
+
+select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_date_group_string_group_date_group;
+
+drop table table_change_date_group_string_group_date_group;
+
+
+
+------------------------------------------------------------------------------------------
+-- SECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP
+--
+--
+--
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP:
+--   (TINYINT, SMALLINT, INT, BIGINT), STRING and
+--   (TINYINT, SMALLINT, INT, BIGINT), CHAR and CHAR trunc and
+--   (TINYINT, SMALLINT, INT, BIGINT), VARCHAR and VARCHAR trunc
+--
+--
+CREATE TABLE table_change_numeric_group_string_group_multi_ints_string_group(insert_num int,
+    c1 tinyint, c2 smallint, c3 int, c4 bigint,
+    c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint,
+    c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint,
+    b STRING);
+
+insert into table table_change_numeric_group_string_group_multi_ints_string_group SELECT insert_num,
+    tinyint1, smallint1, int1, bigint1,
+    tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1,
+    tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1,
+    'original' FROM schema_evolution_data;
+
+select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group;
+
+-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table table_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int,
+    c1 STRING, c2 STRING, c3 STRING, c4 STRING,
+    c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5),
+    c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5),
+    b STRING) ;
+
+insert into table table_change_numeric_group_string_group_multi_ints_string_group VALUES (111,
+    'filler', 'filler', 'filler', 'filler',
+    'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler',
+    'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler',
+    'new');
+
+explain vectorization detail
+select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group;
+
+select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group;
+
+drop table table_change_numeric_group_string_group_multi_ints_string_group;
+
+
+
+--
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP:
+--   (DECIMAL, FLOAT, DOUBLE), STRING and
+--   (DECIMAL, FLOAT, DOUBLE), CHAR and CHAR trunc and
+--   (DECIMAL, FLOAT, DOUBLE), VARCHAR and VARCHAR trunc
+--
+--
+CREATE TABLE table_change_numeric_group_string_group_floating_string_group(insert_num int,
+    c1 decimal(38,18), c2 float, c3 double,
+    c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double,
+    c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double,
+    b STRING);
+
+insert into table table_change_numeric_group_string_group_floating_string_group SELECT insert_num,
+    decimal1, float1, double1,
+    decimal1, float1, double1, decimal1, float1, double1,
+    decimal1, float1, double1, decimal1, float1, double1,
+    'original' FROM schema_evolution_data;
+
+select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group;
+
+-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table table_change_numeric_group_string_group_floating_string_group replace columns (insert_num int,
+    c1 STRING, c2 STRING, c3 STRING,
+    c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7),
+    c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7),
+    b STRING);
+
+insert into table table_change_numeric_group_string_group_floating_string_group VALUES (111,
+    'filler', 'filler', 'filler',
+    'filler', 'filler', 'filler', 'filler', 'filler', 'filler',
+    'filler', 'filler', 'filler', 'filler', 'filler', 'filler',
+    'new');
+
+explain vectorization detail
+select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group;
+
+select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group;
+
+drop table table_change_numeric_group_string_group_floating_string_group;
+
+
+------------------------------------------------------------------------------------------
+-- SECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> STRING_GROUP
+--
+--
+--
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> STRING_GROUP: STRING, (CHAR, CHAR trunc, VARCHAR, VARCHAR trunc) and
+--   CHAR, (VARCHAR, VARCHAR trunc, STRING) and VARCHAR, (CHAR, CHAR trunc, STRING)
+--
+CREATE TABLE table_change_string_group_string_group_string(insert_num int,
+    c1 string, c2 string, c3 string, c4 string,
+    c5 CHAR(50), c6 CHAR(50), c7 CHAR(50),
+    c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING);
+
+insert into table table_change_string_group_string_group_string SELECT insert_num,
+    string2, string2, string2, string2,
+    string2, string2, string2,
+    string2, string2, string2,
+    'original' FROM schema_evolution_data;
+
+select insert_num,c1,c2,c3,c4,b from table_change_string_group_string_group_string;
+
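One subtlety the STRING-group-to-STRING-group conversions lean on: CHAR pads to its declared length and ignores trailing pad characters in comparisons, while VARCHAR and STRING do not pad. A hypothetical check:

    select cast('abc' as char(5)) = cast('abc  ' as char(5));  -- true: trailing pad ignored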
+-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table table_change_string_group_string_group_string replace columns (insert_num int,
+    c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9),
+    c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING,
+    c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) ;
+
+insert into table table_change_string_group_string_group_string VALUES (111,
+    'filler', 'filler', 'filler', 'filler',
+    'filler', 'filler', 'filler',
+    'filler', 'filler', 'filler',
+    'new');
+
+select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_string_group_string_group_string;
+
+drop table table_change_string_group_string_group_string;
+
+
+
+------------------------------------------------------------------------------------------
+-- SECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP
+--
+--
+--
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP:
+--   TINYINT, (SMALLINT, INT, BIGINT, DECIMAL, FLOAT, DOUBLE) and
+--   SMALLINT, (INT, BIGINT, DECIMAL, FLOAT, DOUBLE) and
+--   INT, (BIGINT, DECIMAL, FLOAT, DOUBLE) and
+--   BIGINT, (DECIMAL, FLOAT, DOUBLE)
+--
+CREATE TABLE table_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int,
+    c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint,
+    c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint,
+    c12 int, c13 int, c14 int, c15 int,
+    c16 bigint, c17 bigint, c18 bigint,
+    b STRING);
+
+insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint SELECT insert_num,
+    tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1,
+    smallint1, smallint1, smallint1, smallint1, smallint1,
+    int1, int1, int1, int1,
+    bigint1, bigint1, bigint1,
+    'original' FROM schema_evolution_data;
+
+select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint;
+
+-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table table_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int,
+    c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE,
+    c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE,
+    c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE,
+    c16 decimal(38,18), c17 FLOAT, c18 DOUBLE,
+    b STRING) ;
+
+insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint VALUES (111,
+    7000, 80000, 90000000, 1234.5678, 9876.543, 789.321,
+    80000, 90000000, 1234.5678, 9876.543, 789.321,
+    90000000, 1234.5678, 9876.543, 789.321,
+    1234.5678, 9876.543, 789.321,
+    'new');
+
+select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint;
+
+drop table table_change_lower_to_higher_numeric_group_tinyint_to_bigint;
+
+
+
+--
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP:
+--   DECIMAL, (FLOAT, DOUBLE) and
+--   FLOAT, (DOUBLE)
+--
+CREATE TABLE table_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int,
+    c1 decimal(38,18), c2 decimal(38,18),
+    c3 float,
+    b STRING);
+
+insert into table table_change_lower_to_higher_numeric_group_decimal_to_float SELECT insert_num,
+    decimal1, decimal1,
+    float1,
+    'original' FROM schema_evolution_data;
+
+select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float;
+
+-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table table_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) ;
+
+insert into table table_change_lower_to_higher_numeric_group_decimal_to_float VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new');
+
+select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float;
+
+drop table table_change_lower_to_higher_numeric_group_decimal_to_float;
diff --git ql/src/test/queries/clientpositive/schema_evol_text_vecrow_part_all_complex_llap_io.q ql/src/test/queries/clientpositive/schema_evol_text_vecrow_part_all_complex_llap_io.q
new file mode 100644
index 0000000..3d31b00
--- /dev/null
+++ ql/src/test/queries/clientpositive/schema_evol_text_vecrow_part_all_complex_llap_io.q
@@ -0,0 +1,166 @@
+set hive.explain.user=false;
+set hive.fetch.task.conversion=none;
+set hive.mapred.mode=nonstrict;
+set hive.cli.print.header=true;
+SET hive.exec.schema.evolution=true;
+SET hive.vectorized.use.vectorized.input.format=false;
+SET hive.vectorized.use.vector.serde.deserialize=false;
+SET hive.vectorized.use.row.serde.deserialize=true;
+SET hive.vectorized.execution.enabled=true;
+set hive.exec.dynamic.partition.mode=nonstrict;
+set hive.metastore.disallow.incompatible.col.type.changes=false;
+set hive.default.fileformat=textfile;
+set hive.llap.io.enabled=true;set hive.llap.io.encode.enabled=true;
+
+-- SORT_QUERY_RESULTS
+--
+-- FILE VARIATION: TEXTFILE, Non-Vectorized, MapWork, Partitioned --> all complex conversions
+-- NOTE: the use of hive.vectorized.use.row.serde.deserialize above which enables doing
+-- vectorized reading of TEXTFILE format files using the row SERDE methods.
+
+--
+------------------------------------------------------------------------------------------
+--
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Various --> Various: STRUCT --> STRUCT
+--
+CREATE TABLE part_change_various_various_struct1(insert_num int, s1 STRUCT, b STRING) PARTITIONED BY(part INT);
+
+CREATE TABLE complex_struct1_a_txt(insert_num int, s1 STRUCT, b STRING)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile;
+load data local inpath '../../data/files/schema_evolution/complex_struct1_a.txt' overwrite into table complex_struct1_a_txt;
+
+insert into table part_change_various_various_struct1 partition(part=1) select * from complex_struct1_a_txt;
+
+select insert_num,part,s1,b from part_change_various_various_struct1;
+
+-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table part_change_various_various_struct1 replace columns (insert_num int, s1 STRUCT, b STRING);
+
+CREATE TABLE complex_struct1_b_txt(insert_num int, s1 STRUCT, b STRING)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile;
+load data local inpath '../../data/files/schema_evolution/complex_struct1_b.txt' overwrite into table complex_struct1_b_txt;
+
+insert into table part_change_various_various_struct1 partition(part=2) select * from complex_struct1_b_txt;
+
+CREATE TABLE complex_struct1_c_txt(insert_num int, s1 STRUCT, b STRING)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile;
+load data local inpath '../../data/files/schema_evolution/complex_struct1_c.txt' overwrite into table complex_struct1_c_txt;
+
+insert into table part_change_various_various_struct1 partition(part=1) select * from complex_struct1_c_txt;
+
+explain vectorization detail
+select insert_num,part,s1,b from part_change_various_various_struct1;
+
+select insert_num,part,s1,b from part_change_various_various_struct1;
+
+drop table part_change_various_various_struct1;
+
+
+
+--
+-- SUBSECTION: ALTER TABLE ADD COLUMNS for Various --> Various: STRUCT
+--
+CREATE TABLE part_add_various_various_struct2(insert_num int, b STRING) PARTITIONED BY(part INT);
+
+insert into table part_add_various_various_struct2 partition(part=1)
+    values(1, 'original'),
+          (2, 'original');
+
+select insert_num,part,b from part_add_various_various_struct2;
+
+-- Table-Non-Cascade ADD COLUMN ...
+alter table part_add_various_various_struct2 ADD columns (s2 STRUCT);
+
+CREATE TABLE complex_struct2_a_txt(insert_num int, b STRING, s2 STRUCT)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile;
+load data local inpath '../../data/files/schema_evolution/complex_struct2_a.txt' overwrite into table complex_struct2_a_txt;
+
+insert into table part_add_various_various_struct2 partition(part=1) select * from complex_struct2_a_txt;
+
+CREATE TABLE complex_struct2_b_txt(insert_num int, b STRING, s2 STRUCT)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile;
+load data local inpath '../../data/files/schema_evolution/complex_struct2_b.txt' overwrite into table complex_struct2_b_txt;
+
+insert into table part_add_various_various_struct2 partition(part=2) select * from complex_struct2_b_txt;
+
+select insert_num,part,b,s2 from part_add_various_various_struct2;
+
+-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table part_add_various_various_struct2 REPLACE columns (insert_num int, b STRING, s2 STRUCT);
+
+CREATE TABLE complex_struct2_c_txt(insert_num int, b STRING, s2 STRUCT)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile;
+load data local inpath '../../data/files/schema_evolution/complex_struct2_c.txt' overwrite into table complex_struct2_c_txt;
+
+insert into table part_add_various_various_struct2 partition(part=2) select * from complex_struct2_c_txt;
+
+CREATE TABLE complex_struct2_d_txt(insert_num int, b STRING, s2 STRUCT)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile;
+load data local inpath '../../data/files/schema_evolution/complex_struct2_d.txt' overwrite into table complex_struct2_d_txt;
+
+insert into table part_add_various_various_struct2 partition(part=1) select * from complex_struct2_d_txt;
+
+explain vectorization detail
+select insert_num,part,b,s2 from part_add_various_various_struct2;
+
+select insert_num,part,b,s2 from part_add_various_various_struct2;
+
+drop table part_add_various_various_struct2;
+
+
+
+
+--
+-- SUBSECTION: ALTER TABLE ADD COLUMNS for Various --> Various: ADD COLUMNS to STRUCT type as LAST column of 3 columns
+--
+CREATE TABLE part_add_to_various_various_struct4(insert_num int, b STRING, s3 STRUCT) PARTITIONED BY(part INT);
+
+CREATE TABLE complex_struct4_a_txt(insert_num int, b STRING, s3 STRUCT)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile;
+load data local inpath '../../data/files/schema_evolution/complex_struct4_a.txt' overwrite into table complex_struct4_a_txt;
+
+insert into table part_add_to_various_various_struct4 partition(part=1) select * from complex_struct4_a_txt;
+
+select insert_num,part,b,s3 from part_add_to_various_various_struct4;
+
+-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table part_add_to_various_various_struct4 replace columns (insert_num int, b STRING, s3 STRUCT); + +CREATE TABLE complex_struct4_b_txt(insert_num int, b STRING, s3 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct4_b.txt' overwrite into table complex_struct4_b_txt; + +insert into table part_add_to_various_various_struct4 partition(part=2) select * from complex_struct4_b_txt; + +CREATE TABLE complex_struct4_c_txt(insert_num int, b STRING, s3 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile; +load data local inpath '../../data/files/schema_evolution/complex_struct4_c.txt' overwrite into table complex_struct4_c_txt; + +insert into table part_add_to_various_various_struct4 partition(part=1) select * from complex_struct4_c_txt; + +explain vectorization detail +select insert_num,part,b,s3 from part_add_to_various_various_struct4; + +select insert_num,part,b,s3 from part_add_to_various_various_struct4; + +drop table part_add_to_various_various_struct4; diff --git ql/src/test/queries/clientpositive/schema_evol_text_vecrow_part_all_primitive_llap_io.q ql/src/test/queries/clientpositive/schema_evol_text_vecrow_part_all_primitive_llap_io.q new file mode 100644 index 0000000..7c940ff --- /dev/null +++ ql/src/test/queries/clientpositive/schema_evol_text_vecrow_part_all_primitive_llap_io.q @@ -0,0 +1,208 @@ +set hive.explain.user=false; +set hive.fetch.task.conversion=none; +set hive.mapred.mode=nonstrict; +set hive.cli.print.header=true; +SET hive.exec.schema.evolution=true; +SET hive.vectorized.use.vectorized.input.format=false; +SET hive.vectorized.use.vector.serde.deserialize=false; +SET hive.vectorized.use.row.serde.deserialize=true; +SET hive.vectorized.execution.enabled=true; +set hive.exec.dynamic.partition.mode=nonstrict; +set hive.metastore.disallow.incompatible.col.type.changes=false; +set hive.default.fileformat=textfile; +set hive.llap.io.enabled=true;set hive.llap.io.encode.enabled=true; + +-- SORT_QUERY_RESULTS +-- +-- FILE VARIATION: TEXTFILE, Non-Vectorized, MapWork, Partitioned --> all primitive conversions +-- NOTE: the use of hive.vectorized.use.row.serde.deserialize above which enables doing +-- vectorized reading of TEXTFILE format files using the row SERDE methods. 
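As a quick way to confirm which read path the three use.* settings above select, one can run explain vectorization detail against any TEXTFILE scan and inspect the Map Vectorization summary. A minimal sketch, not part of the patch; the table name is hypothetical, and the exact wording of the row-serde summary lines is an assumption modeled on the .q.out hunks later in this patch:

-- Hypothetical table, for illustration only.
CREATE TABLE row_serde_probe (k INT, v STRING) STORED AS TEXTFILE;
explain vectorization detail
select k, v from row_serde_probe;
-- With hive.vectorized.use.row.serde.deserialize=true, the Map Vectorization
-- summary is expected to show something like:
--   enabledConditionsMet: hive.vectorized.use.row.serde.deserialize IS true
--   inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
--   vectorPartitionDescs: [(ROW_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat)]
--   vectorized: true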
+-- +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS Various --> Various +-- +-- + +CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile; +load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data; + +CREATE TABLE schema_evolution_data_2(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile; +load data local inpath '../../data/files/schema_evolution/schema_evolution_data_2.txt' overwrite into table schema_evolution_data_2; + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Various --> Various: +-- (BYTE, SHORT, INT, LONG, FLOAT, DOUBLE, DECIMAL, STRING, TIMESTAMP) --> BOOLEAN and +-- (BOOLEAN, SHORT, INT, LONG, FLOAT, DOUBLE, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> BYTE with a minimum value of -128 and a maximum value of 127 and +-- (BOOLEAN, TINYINT, INT, LONG, FLOAT, DOUBLE, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> SMALLINT with a minimum value of -32768 and a maximum value of 32767 and +-- (BOOLEAN, TINYINT, SMALLINT, LONG, FLOAT, DOUBLE, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> INT with a range of -2147483648 to 2147483647 and +-- (BOOLEAN, TINYINT, SMALLINT, INT, FLOAT, DOUBLE, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> BIGINT with a range of -9223372036854775808 to 9223372036854775807 +-- +CREATE TABLE part_change_various_various_boolean_to_bigint(insert_num int, + c1 TINYINT, c2 SMALLINT, c3 INT, c4 BIGINT, c5 FLOAT, c6 DOUBLE, c7 DECIMAL(38,18), c8 STRING, c9 TIMESTAMP, + c10 BOOLEAN, c11 SMALLINT, c12 INT, c13 BIGINT, c14 FLOAT, c15 DOUBLE, c16 DECIMAL(38,18), c17 STRING, c18 CHAR(25), c19 VARCHAR(25), c20 TIMESTAMP, + c21 BOOLEAN, c22 TINYINT, c23 INT, c24 BIGINT, c25 FLOAT, c26 DOUBLE, c27 DECIMAL(38,18), c28 STRING, c29 CHAR(25), c30 VARCHAR(25), c31 TIMESTAMP, + c32 BOOLEAN, c33 TINYINT, c34 SMALLINT, c35 BIGINT, c36 FLOAT, c37 DOUBLE, c38 DECIMAL(38,18), c39 STRING, c40 CHAR(25), c41 VARCHAR(25), c42 TIMESTAMP, + c43 BOOLEAN, c44 TINYINT, c45 SMALLINT, c46 INT, c47 FLOAT, c48 DOUBLE, c49 DECIMAL(38,18), c50 STRING, c51 CHAR(25), c52 VARCHAR(25), c53 TIMESTAMP, + b STRING) PARTITIONED BY(part INT); + +insert into table part_change_various_various_boolean_to_bigint partition(part=1) SELECT insert_num, + tinyint1, smallint1, int1, bigint1, float1, double1, decimal1, boolean_str, timestamp1, + boolean1, smallint1, int1, bigint1, float1, double1, decimal1, tinyint_str, tinyint_str, tinyint_str, timestamp1, + boolean1, tinyint1, int1, bigint1, float1, double1, decimal1, smallint_str, smallint_str, smallint_str, timestamp1, + boolean1, tinyint1, smallint1, bigint1, float1, double1, decimal1,
int_str, int_str, int_str, timestamp1, + boolean1, tinyint1, smallint1, int1, float1, double1, decimal1, bigint_str, bigint_str, bigint_str, timestamp1, + 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table part_change_various_various_boolean_to_bigint replace columns (insert_num int, + c1 BOOLEAN, c2 BOOLEAN, c3 BOOLEAN, c4 BOOLEAN, c5 BOOLEAN, c6 BOOLEAN, c7 BOOLEAN, c8 BOOLEAN, c9 BOOLEAN, + c10 TINYINT, c11 TINYINT, c12 TINYINT, c13 TINYINT, c14 TINYINT, c15 TINYINT, c16 TINYINT, c17 TINYINT, c18 TINYINT, c19 TINYINT, c20 TINYINT, + c21 SMALLINT, c22 SMALLINT, c23 SMALLINT, c24 SMALLINT, c25 SMALLINT, c26 SMALLINT, c27 SMALLINT, c28 SMALLINT, c29 SMALLINT, c30 SMALLINT, c31 SMALLINT, + c32 INT, c33 INT, c34 INT, c35 INT, c36 INT, c37 INT, c38 INT, c39 INT, c40 INT, c41 INT, c42 INT, + c43 BIGINT, c44 BIGINT, c45 BIGINT, c46 BIGINT, c47 BIGINT, c48 BIGINT, c49 BIGINT, c50 BIGINT, c51 BIGINT, c52 BIGINT, c53 BIGINT, + b STRING); + +insert into table part_change_various_various_boolean_to_bigint partition(part=1) SELECT insert_num, + boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, int1, int1, int1, int1, int1, int1, int1, + bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, + 'new' FROM schema_evolution_data; + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint; + +drop table part_change_various_various_boolean_to_bigint; + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Various --> Various: +-- (BOOLEAN, TINYINT, SMALLINT, INT, LONG, FLOAT, DOUBLE, STRING, CHAR, VARCHAR, TIMESTAMP) --> DECIMAL +-- (BOOLEAN, TINYINT, SMALLINT, INT, LONG, DOUBLE, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> FLOAT and +-- (BOOLEAN, TINYINT, SMALLINT, INT, LONG, FLOAT, DECIMAL, STRING, CHAR, VARCHAR, TIMESTAMP) --> DOUBLE and +-- +CREATE TABLE part_change_various_various_decimal_to_double(insert_num int, + c1 BOOLEAN, c2 TINYINT, c3 SMALLINT, c4 INT, c5 BIGINT, c6 FLOAT, c7 DOUBLE, c8 STRING, c9 CHAR(25), c10 VARCHAR(25), c11 TIMESTAMP, + c12 BOOLEAN, c13 TINYINT, c14 SMALLINT, c15 INT, c16 BIGINT, c17 DECIMAL(38,18), c18 DOUBLE, c19 STRING, c20 CHAR(25), c21 VARCHAR(25), c22 TIMESTAMP, + c23 BOOLEAN, c24 TINYINT, c25 SMALLINT, c26 INT, c27 BIGINT, c28 DECIMAL(38,18), c29 FLOAT, c30 STRING, c31 CHAR(25), c32 VARCHAR(25), c33 TIMESTAMP, + b STRING) PARTITIONED BY(part INT); + +insert into table 
part_change_various_various_decimal_to_double partition(part=1) SELECT insert_num, + boolean1, tinyint1, smallint1, int1, bigint1, float1, double1, decimal_str, decimal_str, decimal_str, timestamp1, + boolean1, tinyint1, smallint1, int1, bigint1, decimal1, double1, float_str, float_str, float_str, timestamp1, + boolean1, tinyint1, smallint1, int1, bigint1, decimal1, float1, double_str, double_str, double_str, timestamp1, + 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table part_change_various_various_decimal_to_double replace columns (insert_num int, + c1 DECIMAL(38,18), c2 DECIMAL(38,18), c3 DECIMAL(38,18), c4 DECIMAL(38,18), c5 DECIMAL(38,18), c6 DECIMAL(38,18), c7 DECIMAL(38,18), c8 DECIMAL(38,18), c9 DECIMAL(38,18), c10 DECIMAL(38,18), c11 DECIMAL(38,18), + c12 FLOAT, c13 FLOAT, c14 FLOAT, c15 FLOAT, c16 FLOAT, c17 FLOAT, c18 FLOAT, c19 FLOAT, c20 FLOAT, c21 FLOAT, c22 FLOAT, + c23 DOUBLE, c24 DOUBLE, c25 DOUBLE, c26 DOUBLE, c27 DOUBLE, c28 DOUBLE, c29 DOUBLE, c30 DOUBLE, c31 DOUBLE, c32 DOUBLE, c33 DOUBLE, + b STRING); + +insert into table part_change_various_various_decimal_to_double partition(part=1) SELECT insert_num, + decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, + float1, float1, float1, float1, float1, float1, float1, float1, float1, float1, float1, + double1, double1, double1, double1, double1, double1, double1, double1, double1, double1, double1, + 'new' FROM schema_evolution_data_2 WHERE insert_num=111; + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double; + +drop table part_change_various_various_decimal_to_double; + + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Various --> Various: (BOOLEAN, TINYINT, SMALLINT, INT, LONG, FLOAT, DOUBLE, DECIMAL(38,18), STRING, CHAR, VARCHAR, DATE) --> TIMESTAMP +-- +CREATE TABLE part_change_various_various_timestamp(insert_num int, c1 BOOLEAN, c2 TINYINT, c3 SMALLINT, c4 INT, c5 BIGINT, c6 FLOAT, c7 DOUBLE, c8 DECIMAL(38,18), c9 STRING, c10 CHAR(25), c11 VARCHAR(25), c12 DATE, b STRING) PARTITIONED BY(part INT); + +insert into table part_change_various_various_timestamp partition(part=1) SELECT insert_num, boolean1, tinyint1, smallint1, int1, bigint1, float1, double1, decimal1, timestamp_str, timestamp_str, timestamp_str, date1, 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table part_change_various_various_timestamp replace columns (insert_num int, c1 TIMESTAMP, c2 TIMESTAMP, c3 TIMESTAMP, c4 TIMESTAMP, c5 TIMESTAMP, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, c11 TIMESTAMP, c12 TIMESTAMP, b STRING); + +insert into table part_change_various_various_timestamp partition(part=1) SELECT insert_num, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'new' FROM schema_evolution_data_2 WHERE insert_num=111; + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp; + +drop table part_change_various_various_timestamp; + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Various --> Various: (STRING, CHAR, VARCHAR, TIMESTAMP) --> DATE +-- +CREATE TABLE part_change_various_various_date(insert_num int, c1 STRING, c2 CHAR(25), c3 VARCHAR(25), c4 TIMESTAMP, b STRING) PARTITIONED BY(part INT); + +insert into table part_change_various_various_date partition(part=1) SELECT insert_num, date_str, date_str, date_str, timestamp1, 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table part_change_various_various_date replace columns (insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, b STRING); + +insert into table part_change_various_various_date partition(part=1) SELECT insert_num, date1, date1, date1, date1, 'new' FROM schema_evolution_data_2 WHERE insert_num=111; + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date; + +select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date; + +drop table part_change_various_various_date; + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for Same Type (CHAR, VARCHAR, DECIMAL) --> Different maxLength or precision/scale +-- +CREATE TABLE part_change_same_type_different_params(insert_num int, c1 CHAR(12), c2 CHAR(25), c3 VARCHAR(25), c4 VARCHAR(10), c5 DECIMAL(12,4), c6 DECIMAL(20,10), b STRING) PARTITIONED BY(part INT); + +CREATE TABLE same_type1_a_txt(insert_num int, c1 CHAR(12), c2 CHAR(25), c3 VARCHAR(25), c4 VARCHAR(10), c5 DECIMAL(12,4), c6 DECIMAL(20,10), b STRING) +row format delimited fields terminated by '|' +stored as textfile; +load data local inpath '../../data/files/schema_evolution/same_type1_a.txt' overwrite into table same_type1_a_txt; + +insert into table part_change_same_type_different_params partition(part=1) select * from same_type1_a_txt; + +select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params; + +-- Table-Non-Cascade CHANGE COLUMNS ...
+alter table part_change_same_type_different_params replace columns (insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING); + +CREATE TABLE same_type1_b_txt(insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING) +row format delimited fields terminated by '|' +stored as textfile; +load data local inpath '../../data/files/schema_evolution/same_type1_b.txt' overwrite into table same_type1_b_txt; + +insert into table part_change_same_type_different_params partition(part=1) select * from same_type1_b_txt; + +CREATE TABLE same_type1_c_txt(insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING) +row format delimited fields terminated by '|' +stored as textfile; +load data local inpath '../../data/files/schema_evolution/same_type1_c.txt' overwrite into table same_type1_c_txt; + +insert into table part_change_same_type_different_params partition(part=2) select * from same_type1_c_txt; + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params; + +select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params; + +drop table part_change_same_type_different_params; diff --git ql/src/test/queries/clientpositive/schema_evol_text_vecrow_part_llap_io.q ql/src/test/queries/clientpositive/schema_evol_text_vecrow_part_llap_io.q new file mode 100644 index 0000000..22f8aca --- /dev/null +++ ql/src/test/queries/clientpositive/schema_evol_text_vecrow_part_llap_io.q @@ -0,0 +1,339 @@ +set hive.explain.user=false; +set hive.fetch.task.conversion=none; +set hive.mapred.mode=nonstrict; +set hive.cli.print.header=true; +SET hive.exec.schema.evolution=true; +SET hive.vectorized.use.vectorized.input.format=false; +SET hive.vectorized.use.vector.serde.deserialize=false; +SET hive.vectorized.use.row.serde.deserialize=true; +SET hive.vectorized.execution.enabled=true; +set hive.exec.dynamic.partition.mode=nonstrict; +set hive.metastore.disallow.incompatible.col.type.changes=true; +set hive.default.fileformat=textfile; +set hive.llap.io.enabled=true;set hive.llap.io.encode.enabled=true; + +-- SORT_QUERY_RESULTS +-- +-- FILE VARIATION: TEXTFILE, Non-Vectorized, MapWork, Partitioned +-- NOTE: the use of hive.vectorized.use.row.serde.deserialize above which enables doing +-- vectorized reading of TEXTFILE format files using the row SERDE methods. 
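Note that this file keeps hive.metastore.disallow.incompatible.col.type.changes=true, while the all-primitive-conversions file above flips it to false. A minimal sketch of why, not part of the patch, with a hypothetical table name; the expectation (an assumption, not verified against the metastore compatibility rules) is that with the flag set to true the metastore rejects a narrowing change such as TIMESTAMP -> BOOLEAN, which the all-primitive test performs:

set hive.metastore.disallow.incompatible.col.type.changes=true;
CREATE TABLE incompat_probe (insert_num INT, c1 TIMESTAMP) STORED AS TEXTFILE;
-- Expected to fail with an incompatible column type change error:
alter table incompat_probe replace columns (insert_num INT, c1 BOOLEAN);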
+-- + +CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile; +load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data; + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE ADD COLUMNS +-- +-- +-- SUBSECTION: ALTER TABLE ADD COLUMNS: INT PERMUTE SELECT +-- +-- +CREATE TABLE part_add_int_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT); + +insert into table part_add_int_permute_select partition(part=1) VALUES (1, 1111, 'new'); + +-- Table-Non-Cascade ADD COLUMNS ... +alter table part_add_int_permute_select add columns(c int); + +insert into table part_add_int_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333); + +explain vectorization detail +select insert_num,part,a,b from part_add_int_permute_select; + +-- SELECT permutation columns to make sure NULL defaulting works right +select insert_num,part,a,b from part_add_int_permute_select; +select insert_num,part,a,b,c from part_add_int_permute_select; +select insert_num,part,c from part_add_int_permute_select; + +drop table part_add_int_permute_select; + + +-- SUBSECTION: ALTER TABLE ADD COLUMNS: INT, STRING, PERMUTE SELECT +-- +-- +CREATE TABLE part_add_int_string_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT); + +insert into table part_add_int_string_permute_select partition(part=1) VALUES (1, 1111, 'new'); + +-- Table-Non-Cascade ADD COLUMNS ... +alter table part_add_int_string_permute_select add columns(c int, d string); + +insert into table part_add_int_string_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333, '4444'); + +explain vectorization detail +select insert_num,part,a,b from part_add_int_string_permute_select; + +-- SELECT permutation columns to make sure NULL defaulting works right +select insert_num,part,a,b from part_add_int_string_permute_select; +select insert_num,part,a,b,c from part_add_int_string_permute_select; +select insert_num,part,a,b,c,d from part_add_int_string_permute_select; +select insert_num,part,a,c,d from part_add_int_string_permute_select; +select insert_num,part,a,d from part_add_int_string_permute_select; +select insert_num,part,c from part_add_int_string_permute_select; +select insert_num,part,d from part_add_int_string_permute_select; + +drop table part_add_int_string_permute_select; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> DOUBLE +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> DOUBLE: (STRING, CHAR, VARCHAR) +-- +CREATE TABLE part_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING) PARTITIONED BY(part INT); + +insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table part_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING); + +insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double1, double1, double1, 'new' FROM schema_evolution_data WHERE insert_num = 111; + +explain vectorization detail +select insert_num,part,c1,c2,c3,b from part_change_string_group_double; + +select insert_num,part,c1,c2,c3,b from part_change_string_group_double; + +drop table part_change_string_group_double; + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for DATE_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for DATE_GROUP -> STRING_GROUP: DATE,TIMESTAMP, (STRING, CHAR, CHAR trunc, VARCHAR, VARCHAR trunc) +-- +CREATE TABLE part_change_date_group_string_group_date_timestamp(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING) PARTITIONED BY(part INT); + +insert into table part_change_date_group_string_group_date_timestamp partition(part=1) SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table part_change_date_group_string_group_date_timestamp replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING); + +insert into table part_change_date_group_string_group_date_timestamp partition(part=1) VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new'); + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp; + +drop table part_change_date_group_string_group_date_timestamp; + + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: +-- (TINYINT, SMALLINT, INT, BIGINT), STRING and +-- (TINYINT, SMALLINT, INT, BIGINT), CHAR and CHAR trunc and +-- (TINYINT, SMALLINT, INT, BIGINT), VARCHAR and VARCHAR trunc +-- +-- +CREATE TABLE part_change_numeric_group_string_group_multi_ints_string_group(insert_num int, + c1 tinyint, c2 smallint, c3 int, c4 bigint, + c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint, + c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint, + b STRING) PARTITIONED BY(part INT); + +insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) SELECT insert_num, + tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table part_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, c4 STRING, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5), + c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5), + b STRING) ; + +insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new'); + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group; + +drop table part_change_numeric_group_string_group_multi_ints_string_group; + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: +-- (DECIMAL, FLOAT, DOUBLE), STRING and +-- (DECIMAL, FLOAT, DOUBLE), CHAR and CHAR trunc and +-- (DECIMAL, FLOAT, DOUBLE), VARCHAR and VARCHAR trunc +-- +-- +CREATE TABLE part_change_numeric_group_string_group_floating_string_group(insert_num int, + c1 decimal(38,18), c2 float, c3 double, + c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double, + c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double, + b STRING) PARTITIONED BY(part INT); + +insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) SELECT insert_num, + decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table part_change_numeric_group_string_group_floating_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, + c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7), + c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7), + b STRING); + +insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new'); + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group; + +drop table part_change_numeric_group_string_group_floating_string_group; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> STRING_GROUP: STRING, (CHAR, CHAR trunc, VARCHAR, VARCHAR trunc) and +-- CHAR, (VARCHAR, VARCHAR trunc, STRING) and VARCHAR, (CHAR, CHAR trunc, STRING) +-- +CREATE TABLE part_change_string_group_string_group_string(insert_num int, + c1 string, c2 string, c3 string, c4 string, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), + c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING) PARTITIONED BY(part INT); + +insert into table part_change_string_group_string_group_string partition(part=1) SELECT insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,c4,b from part_change_string_group_string_group_string; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table part_change_string_group_string_group_string replace columns (insert_num int, + c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9), + c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING, + c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) ; + +insert into table part_change_string_group_string_group_string partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'new'); + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string; + +drop table part_change_string_group_string_group_string; + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP: +-- TINYINT, (SMALLINT, INT, BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- SMALLINT, (INT, BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- INT, (BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- BIGINT, (DECIMAL, FLOAT, DOUBLE) +-- +CREATE TABLE part_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int, + c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, + c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint, + c12 int, c13 int, c14 int, c15 int, + c16 bigint, c17 bigint, c18 bigint, + b STRING) PARTITIONED BY(part INT); + +insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table part_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) ; + +insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new'); + +explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +drop table part_change_lower_to_higher_numeric_group_tinyint_to_bigint; + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP: +-- DECIMAL, (FLOAT, DOUBLE) and +-- FLOAT, (DOUBLE) +-- +CREATE TABLE part_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING) PARTITIONED BY(part INT); + +insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) SELECT insert_num, + decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data; + +select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table part_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) ; + +insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new'); + +explain vectorization detail +select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float; + +select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float; + +drop table part_change_lower_to_higher_numeric_group_decimal_to_float; \ No newline at end of file diff --git ql/src/test/queries/clientpositive/schema_evol_text_vecrow_table_llap_io.q ql/src/test/queries/clientpositive/schema_evol_text_vecrow_table_llap_io.q new file mode 100644 index 0000000..ad95a81 --- /dev/null +++ ql/src/test/queries/clientpositive/schema_evol_text_vecrow_table_llap_io.q @@ -0,0 +1,325 @@ +set hive.explain.user=false; +set hive.fetch.task.conversion=none; +set hive.cli.print.header=true; +SET hive.exec.schema.evolution=true; +SET hive.vectorized.use.vectorized.input.format=false; +SET hive.vectorized.use.vector.serde.deserialize=false; +SET hive.vectorized.use.row.serde.deserialize=true; +SET hive.vectorized.execution.enabled=true; +set hive.exec.dynamic.partition.mode=nonstrict; +set hive.metastore.disallow.incompatible.col.type.changes=true; +set hive.default.fileformat=textfile; +set hive.llap.io.enabled=true;set hive.llap.io.encode.enabled=true; + +-- SORT_QUERY_RESULTS +-- +-- FILE VARIATION: TEXTFILE, Non-Vectorized, MapWork, Table +-- NOTE: the use of hive.vectorized.use.row.serde.deserialize above which enables doing +-- vectorized reading of TEXTFILE format files using the row SERDE methods. 
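The ADD COLUMNS tests below exercise NULL defaulting: rows written before the ALTER carry no data for the added columns, so reads fill them in as NULL. A minimal sketch, not part of the patch, with a hypothetical table name:

CREATE TABLE add_col_probe (insert_num INT, a INT) STORED AS TEXTFILE;
insert into table add_col_probe VALUES (1, 100);
alter table add_col_probe add columns (c INT);
insert into table add_col_probe VALUES (2, 200, 300);
-- Expected rows: (1, 100, NULL) and (2, 200, 300).
select insert_num, a, c from add_col_probe;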
+-- + +CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile; +load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data; + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE ADD COLUMNS +-- +-- +-- SUBSECTION: ALTER TABLE ADD COLUMNS: INT PERMUTE SELECT +-- +-- +CREATE TABLE table_add_int_permute_select(insert_num int, a INT, b STRING); + +insert into table table_add_int_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade ADD COLUMNS ... +alter table table_add_int_permute_select add columns(c int); + +insert into table table_add_int_permute_select VALUES (111, 80000, 'new', 80000); + +explain vectorization detail +select insert_num,a,b from table_add_int_permute_select; + +-- SELECT permutation columns to make sure NULL defaulting works right +select insert_num,a,b from table_add_int_permute_select; +select insert_num,a,b,c from table_add_int_permute_select; +select insert_num,c from table_add_int_permute_select; + +drop table table_add_int_permute_select; + + +-- SUBSECTION: ALTER TABLE ADD COLUMNS: INT, STRING, PERMUTE SELECT +-- +-- +CREATE TABLE table_add_int_string_permute_select(insert_num int, a INT, b STRING); + +insert into table table_add_int_string_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade ADD COLUMNS ... +alter table table_add_int_string_permute_select add columns(c int, d string); + +insert into table table_add_int_string_permute_select VALUES (111, 80000, 'new', 80000, 'filler'); + +explain vectorization detail +select insert_num,a,b from table_add_int_string_permute_select; + +-- SELECT permutation columns to make sure NULL defaulting works right +select insert_num,a,b from table_add_int_string_permute_select; +select insert_num,a,b,c from table_add_int_string_permute_select; +select insert_num,a,b,c,d from table_add_int_string_permute_select; +select insert_num,a,c,d from table_add_int_string_permute_select; +select insert_num,a,d from table_add_int_string_permute_select; +select insert_num,c from table_add_int_string_permute_select; +select insert_num,d from table_add_int_string_permute_select; + +drop table table_add_int_string_permute_select; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> DOUBLE +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> DOUBLE: (STRING, CHAR, VARCHAR) +-- +CREATE TABLE table_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING); + +insert into table table_change_string_group_double SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table table_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING); + +insert into table table_change_string_group_double VALUES (111, 789.321, 789.321, 789.321, 'new'); + +explain vectorization detail +select insert_num,c1,c2,c3,b from table_change_string_group_double; + +select insert_num,c1,c2,c3,b from table_change_string_group_double; + +drop table table_change_string_group_double; + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for DATE_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for DATE_GROUP -> STRING_GROUP: DATE,TIMESTAMP, (STRING, CHAR, CHAR trunc, VARCHAR, VARCHAR trunc) +-- +CREATE TABLE table_change_date_group_string_group_date_group(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING); + +insert into table table_change_date_group_string_group_date_group SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table table_change_date_group_string_group_date_group replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING); + +insert into table table_change_date_group_string_group_date_group VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new'); + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_date_group_string_group_date_group; + +drop table table_change_date_group_string_group_date_group; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: +-- (TINYINT, SMALLINT, INT, BIGINT), STRING and +-- (TINYINT, SMALLINT, INT, BIGINT), CHAR and CHAR trunc and +-- (TINYINT, SMALLINT, INT, BIGINT), VARCHAR and VARCHAR trunc +-- +-- +CREATE TABLE table_change_numeric_group_string_group_multi_ints_string_group(insert_num int, + c1 tinyint, c2 smallint, c3 int, c4 bigint, + c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint, + c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint, + b STRING); + +insert into table table_change_numeric_group_string_group_multi_ints_string_group SELECT insert_num, + tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + 'original' FROM schema_evolution_data; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table table_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, c4 STRING, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5), + c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5), + b STRING) ; + +insert into table table_change_numeric_group_string_group_multi_ints_string_group VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new'); + +explain vectorization detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group; + +drop table table_change_numeric_group_string_group_multi_ints_string_group; + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: +-- (DECIMAL, FLOAT, DOUBLE), STRING and +-- (DECIMAL, FLOAT, DOUBLE), CHAR and CHAR trunc and +-- (DECIMAL, FLOAT, DOUBLE), VARCHAR and VARCHAR trunc +-- +-- +CREATE TABLE table_change_numeric_group_string_group_floating_string_group(insert_num int, + c1 decimal(38,18), c2 float, c3 double, + c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double, + c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double, + b STRING); + +insert into table table_change_numeric_group_string_group_floating_string_group SELECT insert_num, + decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + 'original' FROM schema_evolution_data; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table table_change_numeric_group_string_group_floating_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, + c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7), + c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7), + b STRING); + +insert into table table_change_numeric_group_string_group_floating_string_group VALUES (111, + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new'); + +explain vectorization detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group; + +drop table table_change_numeric_group_string_group_floating_string_group; + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> STRING_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for STRING_GROUP -> STRING_GROUP: STRING, (CHAR, CHAR trunc, VARCHAR, VARCHAR trunc) and +-- CHAR, (VARCHAR, VARCHAR trunc, STRING) and VARCHAR, (CHAR, CHAR trunc, STRING) +-- +CREATE TABLE table_change_string_group_string_group_string(insert_num int, + c1 string, c2 string, c3 string, c4 string, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), + c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING); + +insert into table table_change_string_group_string_group_string SELECT insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data; + +select insert_num,c1,c2,c3,c4,b from table_change_string_group_string_group_string; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table table_change_string_group_string_group_string replace columns (insert_num int, + c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9), + c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING, + c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) ; + +insert into table table_change_string_group_string_group_string VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'new'); + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_string_group_string_group_string; + +drop table table_change_string_group_string_group_string; + + + +------------------------------------------------------------------------------------------ +-- SECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP +-- +-- +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP: +-- TINYINT, (SMALLINT, INT, BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- SMALLINT, (INT, BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- INT, (BIGINT, DECIMAL, FLOAT, DOUBLE) and +-- BIGINT, (DECIMAL, FLOAT, DOUBLE) +-- +CREATE TABLE table_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int, + c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, + c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint, + c12 int, c13 int, c14 int, c15 int, + c16 bigint, c17 bigint, c18 bigint, + b STRING); + +insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM schema_evolution_data; + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +-- Table-Non-Cascade CHANGE COLUMNS ... +alter table table_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) ; + +insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new'); + +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint; + +drop table table_change_lower_to_higher_numeric_group_tinyint_to_bigint; + + + +-- +-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" NUMERIC_GROUP: +-- DECIMAL, (FLOAT, DOUBLE) and +-- FLOAT, (DOUBLE) +-- +CREATE TABLE table_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING); + +insert into table table_change_lower_to_higher_numeric_group_decimal_to_float SELECT insert_num, + decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data; + +select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float; + +-- Table-Non-Cascade CHANGE COLUMNS ... 
+alter table table_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) ; + +insert into table table_change_lower_to_higher_numeric_group_decimal_to_float VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new'); + +select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float; + +drop table table_change_lower_to_higher_numeric_group_decimal_to_float; diff --git ql/src/test/queries/clientpositive/vector_llap_text_1.q ql/src/test/queries/clientpositive/vector_llap_text_1.q new file mode 100644 index 0000000..e038146 --- /dev/null +++ ql/src/test/queries/clientpositive/vector_llap_text_1.q @@ -0,0 +1,45 @@ +set hive.exec.dynamic.partition.mode=nonstrict; +set hive.vectorized.execution.enabled=true; +set hive.llap.io.enabled=true; +set hive.map.aggr=false; +set hive.strict.checks.bucketing=false; +set hive.mapred.mode=nonstrict; +set hive.explain.user=false; + +set hive.auto.convert.join=true; +set hive.auto.convert.join.noconditionaltask=true; +set hive.auto.convert.join.noconditionaltask.size=10000; + +CREATE TABLE srcbucket_mapjoin(key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE; +CREATE TABLE tab_part (key int, value string) PARTITIONED BY(ds STRING) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE; +CREATE TABLE srcbucket_mapjoin_part (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE; + +load data local inpath '../../data/files/bmj/000000_0' INTO TABLE srcbucket_mapjoin partition(ds='2008-04-08'); +load data local inpath '../../data/files/bmj1/000001_0' INTO TABLE srcbucket_mapjoin partition(ds='2008-04-08'); + +load data local inpath '../../data/files/bmj/000000_0' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08'); +load data local inpath '../../data/files/bmj/000001_0' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08'); +load data local inpath '../../data/files/bmj/000002_0' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08'); +load data local inpath '../../data/files/bmj/000003_0' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08'); + +set hive.optimize.bucketingsorting=false; +insert overwrite table tab_part partition (ds='2008-04-08') +select key,value from srcbucket_mapjoin_part; + +CREATE TABLE tab(key int, value string) PARTITIONED BY(ds STRING) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE; +insert overwrite table tab partition (ds='2008-04-08') +select key,value from srcbucket_mapjoin; + +set hive.convert.join.bucket.mapjoin.tez = true; +explain vectorization detail +select a.key, a.value, b.value +from tab a join tab_part b on a.key = b.key +order by a.key, a.value, b.value +limit 10; +select a.key, a.value, b.value +from tab a join tab_part b on a.key = b.key +order by a.key, a.value, b.value +limit 10; + + + diff --git ql/src/test/results/clientpositive/llap/acid_no_buckets.q.out ql/src/test/results/clientpositive/llap/acid_no_buckets.q.out index 629b05d..ef9a3c5 100644 --- ql/src/test/results/clientpositive/llap/acid_no_buckets.q.out +++ ql/src/test/results/clientpositive/llap/acid_no_buckets.q.out @@ -1146,9 +1146,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true 
usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1344,9 +1345,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1513,9 +1515,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1543,9 +1546,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1931,9 +1935,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -2130,9 +2135,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), 
(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -2300,9 +2306,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -2330,9 +2337,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 diff --git ql/src/test/results/clientpositive/llap/llap_acid.q.out ql/src/test/results/clientpositive/llap/llap_acid.q.out index 38889b9..0ef8365 100644 --- ql/src/test/results/clientpositive/llap/llap_acid.q.out +++ ql/src/test/results/clientpositive/llap/llap_acid.q.out @@ -120,9 +120,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 4 @@ -261,9 +262,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 4 @@ -370,9 +372,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: 
[] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 4 diff --git ql/src/test/results/clientpositive/llap/llap_acid_fast.q.out ql/src/test/results/clientpositive/llap/llap_acid_fast.q.out index 4a7297d..cb26440 100644 --- ql/src/test/results/clientpositive/llap/llap_acid_fast.q.out +++ ql/src/test/results/clientpositive/llap/llap_acid_fast.q.out @@ -114,9 +114,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 4 @@ -255,9 +256,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 4 @@ -364,9 +366,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 4 diff --git ql/src/test/results/clientpositive/llap/llap_partitioned.q.out ql/src/test/results/clientpositive/llap/llap_partitioned.q.out index ac75467..64d0240 100644 --- ql/src/test/results/clientpositive/llap/llap_partitioned.q.out +++ ql/src/test/results/clientpositive/llap/llap_partitioned.q.out @@ -1681,9 +1681,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), 
(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), 
(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 10 @@ -1747,9 +1748,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: 
[org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 10 @@ -2129,9 +2131,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: true + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 4 diff --git ql/src/test/results/clientpositive/llap/llap_vector_nohybridgrace.q.out ql/src/test/results/clientpositive/llap/llap_vector_nohybridgrace.q.out index 2c13d5d..49e9517 100644 --- ql/src/test/results/clientpositive/llap/llap_vector_nohybridgrace.q.out +++ ql/src/test/results/clientpositive/llap/llap_vector_nohybridgrace.q.out @@ -93,7 +93,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -135,7 +135,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -272,7 +272,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -314,7 +314,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/mergejoin.q.out ql/src/test/results/clientpositive/llap/mergejoin.q.out index c07afdf..603843c 100644 --- ql/src/test/results/clientpositive/llap/mergejoin.q.out +++ ql/src/test/results/clientpositive/llap/mergejoin.q.out @@ -66,9 +66,10 
@@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -150,9 +151,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -447,9 +449,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -500,9 +503,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1622,9 +1626,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1667,9 +1672,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1818,9 +1824,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1863,9 +1870,10 @@ STAGE PLANS: 
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -2014,9 +2022,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -2059,9 +2068,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -2222,9 +2232,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -2306,9 +2317,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -2358,9 +2370,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -2620,9 +2633,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -2673,9 +2687,10 @@ STAGE PLANS: 
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -2836,9 +2851,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -2889,9 +2905,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -2942,9 +2959,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -2995,9 +3013,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -3153,9 +3172,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -3206,9 +3226,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -3367,9 +3388,10 @@ STAGE PLANS: 
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -3451,9 +3473,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -3503,9 +3526,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -3773,9 +3797,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -3826,9 +3851,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -3879,9 +3905,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -3932,9 +3959,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -4103,9 +4131,10 @@ STAGE PLANS: 
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -4155,9 +4184,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 diff --git ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_part_llap_io.q.out ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_part_llap_io.q.out new file mode 100644 index 0000000..1b5251e --- /dev/null +++ ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_part_llap_io.q.out @@ -0,0 +1,1080 @@ +PREHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@schema_evolution_data +POSTHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@schema_evolution_data +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@schema_evolution_data +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@schema_evolution_data +PREHOOK: query: CREATE TABLE part_add_int_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_add_int_permute_select +POSTHOOK: query: CREATE TABLE part_add_int_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part 
INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_add_int_permute_select +PREHOOK: query: insert into table part_add_int_permute_select partition(part=1) VALUES (1, 1111, 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_add_int_permute_select@part=1 +POSTHOOK: query: insert into table part_add_int_permute_select partition(part=1) VALUES (1, 1111, 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_add_int_permute_select@part=1 +POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).a SCRIPT [] +POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).insert_num SCRIPT [] +col1 col2 col3 +PREHOOK: query: alter table part_add_int_permute_select add columns(c int) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@part_add_int_permute_select +PREHOOK: Output: default@part_add_int_permute_select +POSTHOOK: query: alter table part_add_int_permute_select add columns(c int) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@part_add_int_permute_select +POSTHOOK: Output: default@part_add_int_permute_select +PREHOOK: query: insert into table part_add_int_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333) +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_add_int_permute_select@part=1 +POSTHOOK: query: insert into table part_add_int_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333) +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_add_int_permute_select@part=1 +POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).a SCRIPT [] +POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).c SCRIPT [] +POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).insert_num SCRIPT [] +col1 col2 col3 col4 +PREHOOK: query: select insert_num,part,a,b from part_add_int_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_permute_select +PREHOOK: Input: default@part_add_int_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b from part_add_int_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_permute_select +POSTHOOK: Input: default@part_add_int_permute_select@part=1 +#### A masked pattern was here #### +insert_num part a b +1 1 1111 new +2 1 2222 new +PREHOOK: query: select insert_num,part,a,b,c from part_add_int_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_permute_select +PREHOOK: Input: default@part_add_int_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b,c from part_add_int_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_permute_select +POSTHOOK: Input: default@part_add_int_permute_select@part=1 +#### A masked pattern was here #### +insert_num part a b c +1 1 1111 new NULL +2 1 2222 new 3333 +PREHOOK: query: select insert_num,part,c from part_add_int_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_permute_select +PREHOOK: Input: default@part_add_int_permute_select@part=1 +#### A masked 
pattern was here #### +POSTHOOK: query: select insert_num,part,c from part_add_int_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_permute_select +POSTHOOK: Input: default@part_add_int_permute_select@part=1 +#### A masked pattern was here #### +insert_num part c +1 1 NULL +2 1 3333 +PREHOOK: query: drop table part_add_int_permute_select +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_add_int_permute_select +PREHOOK: Output: default@part_add_int_permute_select +POSTHOOK: query: drop table part_add_int_permute_select +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_add_int_permute_select +POSTHOOK: Output: default@part_add_int_permute_select +PREHOOK: query: CREATE TABLE part_add_int_string_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_add_int_string_permute_select +POSTHOOK: query: CREATE TABLE part_add_int_string_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_add_int_string_permute_select +PREHOOK: query: insert into table part_add_int_string_permute_select partition(part=1) VALUES (1, 1111, 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_add_int_string_permute_select@part=1 +POSTHOOK: query: insert into table part_add_int_string_permute_select partition(part=1) VALUES (1, 1111, 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_add_int_string_permute_select@part=1 +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).a SCRIPT [] +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).insert_num SCRIPT [] +col1 col2 col3 +PREHOOK: query: alter table part_add_int_string_permute_select add columns(c int, d string) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Output: default@part_add_int_string_permute_select +POSTHOOK: query: alter table part_add_int_string_permute_select add columns(c int, d string) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Output: default@part_add_int_string_permute_select +PREHOOK: query: insert into table part_add_int_string_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333, '4444') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_add_int_string_permute_select@part=1 +POSTHOOK: query: insert into table part_add_int_string_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333, '4444') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_add_int_string_permute_select@part=1 +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).a SCRIPT [] +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).c SCRIPT [] +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).d SCRIPT [] +POSTHOOK: Lineage: 
part_add_int_string_permute_select PARTITION(part=1).insert_num SCRIPT [] +col1 col2 col3 col4 col5 +PREHOOK: query: explain vectorization only detail +select insert_num,part,a,b,c,d from part_add_int_string_permute_select +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization only detail +select insert_num,part,a,b,c,d from part_add_int_string_permute_select +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +PREHOOK: query: select insert_num,part,a,b from part_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b from part_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +insert_num part a b +1 1 1111 new +2 1 2222 new +PREHOOK: query: select insert_num,part,a,b,c from part_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b,c from part_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +insert_num part a b c +1 1 1111 new NULL +2 1 2222 new 3333 +PREHOOK: query: select insert_num,part,a,b,c,d from part_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b,c,d from part_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +insert_num part a b c d +1 1 1111 new NULL NULL +2 1 2222 new 3333 4444 +PREHOOK: query: select insert_num,part,a,c,d from part_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,c,d from part_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +insert_num part a c d +1 1 1111 NULL NULL +2 1 2222 3333 4444 +PREHOOK: query: select insert_num,part,a,d from part_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,d from part_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +insert_num part a d +1 1 1111 NULL +2 1 2222 4444 
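The golden output above exercises the core schema-evolution path for transactional ORC: columns are added after data has already been written, and rows that predate the ALTER must read back as NULL in the new columns no matter how the projection permutes them. Below is a minimal sketch of that pattern, condensed from the statements recorded above; the actual schema_evol_orc_acid_part_llap_io.q test also toggles vectorization and LLAP IO properties (the EXPLAIN above reports hive.vectorized.execution.enabled IS false at that point), which this sketch omits.

    CREATE TABLE part_add_int_string_permute_select(insert_num int, a INT, b STRING)
      PARTITIONED BY(part INT)
      CLUSTERED BY (a) INTO 2 BUCKETS
      STORED AS ORC TBLPROPERTIES ('transactional'='true');

    INSERT INTO TABLE part_add_int_string_permute_select PARTITION(part=1)
      VALUES (1, 1111, 'new');

    -- Widen the schema; ORC files already on disk keep their old physical schema.
    ALTER TABLE part_add_int_string_permute_select ADD COLUMNS(c int, d string);

    INSERT INTO TABLE part_add_int_string_permute_select PARTITION(part=1)
      VALUES (2, 2222, 'new', 3333, '4444');

    -- Per the golden output: the pre-ALTER row reads back as (1, 1, 1111, NULL),
    -- while the post-ALTER row carries its written values (2, 1, 2222, 4444).
    SELECT insert_num, part, a, d FROM part_add_int_string_permute_select;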
+PREHOOK: query: select insert_num,part,c from part_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c from part_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +insert_num part c +1 1 NULL +2 1 3333 +PREHOOK: query: select insert_num,part,d from part_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,d from part_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +insert_num part d +1 1 NULL +2 1 4444 +PREHOOK: query: drop table part_add_int_string_permute_select +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Output: default@part_add_int_string_permute_select +POSTHOOK: query: drop table part_add_int_string_permute_select +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Output: default@part_add_int_string_permute_select +PREHOOK: query: CREATE TABLE part_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING) PARTITIONED BY(part INT) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_string_group_double +POSTHOOK: query: CREATE TABLE part_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING) PARTITIONED BY(part INT) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_string_group_double +PREHOOK: query: insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_string_group_double@part=1 +POSTHOOK: query: insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_string_group_double@part=1 +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).c2 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).c3 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, 
type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +_col0 _col1 _col2 _col3 _col4 +PREHOOK: query: alter table part_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_string_group_double +PREHOOK: Output: default@part_change_string_group_double +POSTHOOK: query: alter table part_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_string_group_double +POSTHOOK: Output: default@part_change_string_group_double +PREHOOK: query: insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double1, double1, double1, 'new' FROM schema_evolution_data WHERE insert_num = 111 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_string_group_double@part=1 +POSTHOOK: query: insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double1, double1, double1, 'new' FROM schema_evolution_data WHERE insert_num = 111 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_string_group_double@part=1 +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num double1 double1 double1 _c4 +PREHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_string_group_double +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_string_group_double +PREHOOK: Input: default@part_change_string_group_double@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_string_group_double +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_string_group_double +POSTHOOK: Input: default@part_change_string_group_double@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 b +101 1 1.7976931348623157E308 1.7976931348623157E308 1.7976931348623157E308 original +102 1 -1.7976931348623157E308 -1.7976931348623157E308 -1.7976931348623157E308 original +103 1 NULL NULL NULL original +104 1 30.774 30.774 30.774 original +105 1 46114.28 46114.28 46114.28 original +PREHOOK: query: drop table part_change_string_group_double +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_string_group_double +PREHOOK: Output: default@part_change_string_group_double +POSTHOOK: query: drop table part_change_string_group_double +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_string_group_double +POSTHOOK: 
Output: default@part_change_string_group_double +PREHOOK: query: CREATE TABLE part_change_date_group_string_group_date_timestamp(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING) PARTITIONED BY(part INT) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_date_group_string_group_date_timestamp +POSTHOOK: query: CREATE TABLE part_change_date_group_string_group_date_timestamp(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING) PARTITIONED BY(part INT) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_date_group_string_group_date_timestamp +PREHOOK: query: insert into table part_change_date_group_string_group_date_timestamp partition(part=1) SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_date_group_string_group_date_timestamp@part=1 +POSTHOOK: query: insert into table part_change_date_group_string_group_date_timestamp partition(part=1) SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_date_group_string_group_date_timestamp@part=1 +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: 
part_change_date_group_string_group_date_timestamp PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num date1 date1 date1 date1 date1 timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 _c11 +PREHOOK: query: alter table part_change_date_group_string_group_date_timestamp replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_date_group_string_group_date_timestamp +PREHOOK: Output: default@part_change_date_group_string_group_date_timestamp +POSTHOOK: query: alter table part_change_date_group_string_group_date_timestamp replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp +POSTHOOK: Output: default@part_change_date_group_string_group_date_timestamp +PREHOOK: query: insert into table part_change_date_group_string_group_date_timestamp partition(part=1) VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_change_date_group_string_group_date_timestamp@part=1 +POSTHOOK: query: insert into table part_change_date_group_string_group_date_timestamp partition(part=1) VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_change_date_group_string_group_date_timestamp@part=1 +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c1 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c10 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c2 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c3 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c4 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c5 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c6 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c7 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c8 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c9 SCRIPT [] +POSTHOOK: Lineage: 
part_change_date_group_string_group_date_timestamp PARTITION(part=1).insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_date_group_string_group_date_timestamp +PREHOOK: Input: default@part_change_date_group_string_group_date_timestamp@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp +POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b +101 1 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.970117179 6229-06-28 02:5 6229-06-28 02:54:28.970117179 6229-06-28 02:5 original +102 1 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:3 5966-07-09 03:30:50.597 5966-07-09 03:3 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-02 06:34:14.0 1978-08-02 06:34:14.0 1978-08-02 06:3 1978-08-02 06:34:14.0 1978-08-02 06:3 original +105 1 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036854 1991-01-06 16:2 1991-01-06 16:20:39.72036854 1991-01-06 16:2 original +111 1 filler filler filler filler filler filler filler filler filler filler new +PREHOOK: query: drop table part_change_date_group_string_group_date_timestamp +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_date_group_string_group_date_timestamp +PREHOOK: Output: default@part_change_date_group_string_group_date_timestamp +POSTHOOK: query: drop table part_change_date_group_string_group_date_timestamp +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp +POSTHOOK: Output: default@part_change_date_group_string_group_date_timestamp +PREHOOK: query: CREATE TABLE part_change_numeric_group_string_group_multi_ints_string_group(insert_num int, + c1 tinyint, c2 smallint, c3 int, c4 bigint, + c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint, + c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint, + b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: query: CREATE TABLE part_change_numeric_group_string_group_multi_ints_string_group(insert_num int, + c1 tinyint, c2 smallint, c3 int, c4 bigint, + c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint, + c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint, + b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: query: insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) SELECT insert_num, + tinyint1, smallint1, int1, bigint1, 
+ tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +POSTHOOK: query: insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) SELECT insert_num, + tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c16 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c17 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c18 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c19 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, 
comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c20 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 _c21 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 b +101 1 -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL original +102 1 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 23 
834 203332 888888857923222 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 203332 888888857923222 original +105 1 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 original +PREHOOK: query: alter table part_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, c4 STRING, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5), + c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5), + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: query: alter table part_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, c4 STRING, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5), + c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5), + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: query: insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +POSTHOOK: query: insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c1 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c10 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c11 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c12 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c13 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c14 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c15 SCRIPT [] 
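The multi-int test above reduces to a simple pattern: integer columns are swapped for string-group types via ALTER TABLE ... REPLACE COLUMNS, and pre-existing rows are then read through the new schema with CHAR/VARCHAR truncation applied on the fly. A minimal HiveQL sketch of that pattern (the table name t and its single column are illustrative, not part of the golden file, and it assumes a session configured like the test, with incompatible column type changes permitted):

CREATE TABLE t (i int) PARTITIONED BY (part int);
INSERT INTO t PARTITION (part=1) VALUES (2147483647);
ALTER TABLE t REPLACE COLUMNS (i CHAR(5));
-- Old rows are now read through the CHAR(5) schema: the stored
-- 2147483647 comes back as '21474', matching the truncated values
-- ('21474', '92233', 'fille') in the SELECT results that follow.
SELECT i FROM t;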
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c16 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c17 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c18 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c19 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c2 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c20 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c3 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c4 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c5 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c6 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c7 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c8 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c9 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 _col17 _col18 _col19 _col20 _col21 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 b +101 1 -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147 NULL -128 NULL -2147483648 NULL -128 NULL -2147 NULL original +102 1 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 21474 92233 127 32767 2147483647 9223372036854775807 127 32767 21474 92233 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 20333 88888 23 834 203332 888888857923222 23 834 20333 88888 original +105 1 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -2830 -9999 -2222 -99 -28300 -999992 -222282153733 -99 -2830 -9999 -2222 original +111 1 filler filler filler filler filler filler filler filler fille fille fille fille filler filler filler filler 
fille fille fille fille new +PREHOOK: query: drop table part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: query: drop table part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: query: CREATE TABLE part_change_numeric_group_string_group_floating_string_group(insert_num int, + c1 decimal(38,18), c2 float, c3 double, + c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double, + c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double, + b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: query: CREATE TABLE part_change_numeric_group_string_group_floating_string_group(insert_num int, + c1 decimal(38,18), c2 float, c3 double, + c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double, + c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double, + b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: query: insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) SELECT insert_num, + decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group@part=1 +POSTHOOK: query: insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) SELECT insert_num, + decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group@part=1 +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, 
comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num decimal1 float1 double1 decimal1 float1 double1 decimal1 float1 double1 decimal1 float1 double1 decimal1 float1 double1 _c16 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 
c9 c10 c11 c12 c13 c14 c15 b +101 1 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 original +102 1 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 original +105 1 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 original +PREHOOK: query: alter table part_change_numeric_group_string_group_floating_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, + c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7), + c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7), + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: query: alter table part_change_numeric_group_string_group_floating_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, + c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7), + c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7), + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: query: insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group@part=1 +POSTHOOK: query: insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group@part=1 +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: 
part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c1 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c10 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c11 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c12 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c13 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c14 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c15 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c2 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c3 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c4 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c5 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c6 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c7 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c8 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c9 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b +101 1 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 original +102 1 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 66475.561431 -100.35978 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 66475.561431 -100.35978 30.774 66475.5 
-100.35 30.774 original +105 1 9250340.75 NULL 46114.28 9250340.75 NULL 46114.28 9250340 NULL 46114.2 9250340.75 NULL 46114.28 9250340 NULL 46114.2 original +111 1 filler filler filler filler filler filler filler filler filler filler filler filler filler filler filler new +PREHOOK: query: drop table part_change_numeric_group_string_group_floating_string_group +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: query: drop table part_change_numeric_group_string_group_floating_string_group +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: query: CREATE TABLE part_change_string_group_string_group_string(insert_num int, + c1 string, c2 string, c3 string, c4 string, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), + c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING) PARTITIONED BY(part INT) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_string_group_string_group_string +POSTHOOK: query: CREATE TABLE part_change_string_group_string_group_string(insert_num int, + c1 string, c2 string, c3 string, c4 string, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), + c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING) PARTITIONED BY(part INT) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_string_group_string_group_string +PREHOOK: query: insert into table part_change_string_group_string_group_string partition(part=1) SELECT insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_string_group_string_group_string@part=1 +POSTHOOK: query: insert into table part_change_string_group_string_group_string partition(part=1) SELECT insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_string_group_string_group_string@part=1 +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c10 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: 
part_change_string_group_string_group_string PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c5 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c6 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c7 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c8 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c9 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,b from part_change_string_group_string_group_string +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_string_group_string_group_string +PREHOOK: Input: default@part_change_string_group_string_group_string@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,b from part_change_string_group_string_group_string +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_string_group_string_group_string +POSTHOOK: Input: default@part_change_string_group_string_group_string@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 b +101 1 freckled freckled freckled freckled original +102 1 ox ox ox ox original +103 1 original +104 1 I cooked I cooked I cooked I cooked original +105 1 200 200 200 200 original +PREHOOK: query: alter table part_change_string_group_string_group_string replace columns (insert_num int, + c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9), + c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING, + c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_string_group_string_group_string +PREHOOK: Output: default@part_change_string_group_string_group_string +POSTHOOK: query: alter table part_change_string_group_string_group_string replace columns (insert_num int, + c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9), + c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING, + c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_string_group_string_group_string +POSTHOOK: Output: default@part_change_string_group_string_group_string +PREHOOK: query: insert into table part_change_string_group_string_group_string partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_change_string_group_string_group_string@part=1 +POSTHOOK: query: insert 
into table part_change_string_group_string_group_string partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_change_string_group_string_group_string@part=1 +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c1 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c10 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c2 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c3 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c4 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c5 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c6 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c7 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c8 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c9 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_string_group_string_group_string +PREHOOK: Input: default@part_change_string_group_string_group_string@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_string_group_string_group_string +POSTHOOK: Input: default@part_change_string_group_string_group_string@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b +101 1 freckled freckled freckled freckled freckled freckled freckled freckled freckled freckled original +102 1 ox ox ox ox ox ox ox ox ox ox original +103 1 original +104 1 I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked original +105 1 200 200 200 200 200 200 200 200 200 200 original +111 1 filler filler filler filler filler filler filler filler filler filler new +PREHOOK: query: drop table part_change_string_group_string_group_string +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_string_group_string_group_string +PREHOOK: Output: default@part_change_string_group_string_group_string +POSTHOOK: query: drop table part_change_string_group_string_group_string +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_string_group_string_group_string +POSTHOOK: Output: default@part_change_string_group_string_group_string +PREHOOK: query: CREATE TABLE part_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int, + c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, + c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint, + c12 int, c13 int, c14 int, c15 int, + c16 bigint, c17 bigint, c18 bigint, + b STRING) PARTITIONED BY(part INT) 
clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: CREATE TABLE part_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int, + c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, + c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint, + c12 int, c13 int, c14 int, c15 int, + c16 bigint, c17 bigint, c18 bigint, + b STRING) PARTITIONED BY(part INT) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: query: insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +POSTHOOK: query: insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c16 SIMPLE 
[(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c17 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c18 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 smallint1 smallint1 smallint1 smallint1 smallint1 int1 int1 int1 int1 bigint1 bigint1 bigint1 _c19 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 b +101 1 -128 -128 -128 -128 -128 -128 NULL NULL 
NULL NULL NULL -2147483648 -2147483648 -2147483648 -2147483648 NULL NULL NULL original +102 1 127 127 127 127 127 127 32767 32767 32767 32767 32767 2147483647 2147483647 2147483647 2147483647 9223372036854775807 9223372036854775807 9223372036854775807 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 23 23 23 23 23 23 834 834 834 834 834 203332 203332 203332 203332 888888857923222 888888857923222 888888857923222 original +105 1 -99 -99 -99 -99 -99 -99 -28300 -28300 -28300 -28300 -28300 -999992 -999992 -999992 -999992 -222282153733 -222282153733 -222282153733 original +PREHOOK: query: alter table part_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: alter table part_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: query: insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +POSTHOOK: query: insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c1 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c10 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c11 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c12 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c13 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c14 SCRIPT [] 
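The lower-to-higher test above exercises the same REPLACE COLUMNS pattern in the widening direction: each integer column is replaced by a larger integer, decimal(38,18), float, or double, and existing rows are promoted on read. A minimal sketch (again with an illustrative table t, under the same session assumptions as the test):

CREATE TABLE t (c bigint) PARTITIONED BY (part int);
INSERT INTO t PARTITION (part=1) VALUES (9223372036854775807);
ALTER TABLE t REPLACE COLUMNS (c float);
-- Promotion to decimal is exact (127 reads back as
-- 127.000000000000000000), while float narrows precision: the
-- bigint above reads back as 9.223372E18 in the results below.
SELECT c FROM t;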
+POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c15 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c16 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c17 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c18 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c2 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c3 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c4 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c5 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c6 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c7 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c8 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c9 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 _col17 _col18 _col19 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 b +101 1 -128 -128 -128 -128.000000000000000000 -128.0 -128.0 NULL NULL NULL NULL NULL -2147483648 -2147483648.000000000000000000 -2.14748365E9 -2.147483648E9 NULL NULL NULL original +102 1 127 127 127 127.000000000000000000 127.0 127.0 32767 32767 32767.000000000000000000 32767.0 32767.0 2147483647 2147483647.000000000000000000 2.14748365E9 2.147483647E9 9223372036854775807.000000000000000000 9.223372E18 9.223372036854776E18 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 23 23 23 23.000000000000000000 23.0 23.0 834 834 834.000000000000000000 834.0 834.0 203332 203332.000000000000000000 203332.0 203332.0 888888857923222.000000000000000000 8.8888885E14 8.88888857923222E14 original +105 1 -99 -99 -99 -99.000000000000000000 -99.0 -99.0 -28300 -28300 -28300.000000000000000000 -28300.0 -28300.0 -999992 -999992.000000000000000000 -999992.0 -999992.0 -222282153733.000000000000000000 -2.22282154E11 -2.22282153733E11 original +111 1 7000 80000 90000000 1234.567800000000000000 9876.543 
789.321 80000 90000000 1234.567800000000000000 9876.543 789.321 90000000 1234.567800000000000000 9876.543 789.321 1234.567800000000000000 9876.543 789.321 new +PREHOOK: query: drop table part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: drop table part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: query: CREATE TABLE part_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING) PARTITIONED BY(part INT) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: CREATE TABLE part_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING) PARTITIONED BY(part INT) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: query: insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) SELECT insert_num, + decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +POSTHOOK: query: insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) SELECT insert_num, + decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num decimal1 decimal1 float1 _c4 +PREHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: QUERY +PREHOOK: Input: 
default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 b +101 1 99999999999999999999.999999999999999999 99999999999999999999.999999999999999999 Infinity original +102 1 -99999999999999999999.999999999999999999 -99999999999999999999.999999999999999999 -Infinity original +103 1 NULL NULL NULL original +104 1 66475.561431000000000000 66475.561431000000000000 -100.35978 original +105 1 9250340.750000000000000000 9250340.750000000000000000 NULL original +PREHOOK: query: alter table part_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: alter table part_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: query: insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +POSTHOOK: query: insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c1 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c2 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c3 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 +PREHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Input: 
default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 b +101 1 1.0E20 1.0E20 Infinity original +102 1 -1.0E20 -1.0E20 -Infinity original +103 1 NULL NULL NULL original +104 1 66475.56 66475.561431 -100.35978 original +105 1 9250341.0 9250340.75 NULL original +111 1 1234.5677 9876.543 1234.5678 new +PREHOOK: query: drop table part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: drop table part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float diff --git ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_part_update_llap_io.q.out ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_part_update_llap_io.q.out new file mode 100644 index 0000000..2bcb6d5 --- /dev/null +++ ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_part_update_llap_io.q.out @@ -0,0 +1,1681 @@ +PREHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@schema_evolution_data +POSTHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@schema_evolution_data +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@schema_evolution_data +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@schema_evolution_data +PREHOOK: query: CREATE TABLE schema_evolution_data_2(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, 
date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@schema_evolution_data_2 +POSTHOOK: query: CREATE TABLE schema_evolution_data_2(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@schema_evolution_data_2 +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data_2.txt' overwrite into table schema_evolution_data_2 +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@schema_evolution_data_2 +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data_2.txt' overwrite into table schema_evolution_data_2 +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@schema_evolution_data_2 +PREHOOK: query: CREATE TABLE partitioned_update_1(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@partitioned_update_1 +POSTHOOK: query: CREATE TABLE partitioned_update_1(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@partitioned_update_1 +PREHOOK: query: insert into table partitioned_update_1 partition(part=1) SELECT insert_num, int1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@partitioned_update_1@part=1 +POSTHOOK: query: insert into table partitioned_update_1 partition(part=1) SELECT insert_num, int1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@partitioned_update_1@part=1 +POSTHOOK: Lineage: partitioned_update_1 PARTITION(part=1).a SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_update_1 PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: partitioned_update_1 PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 +PREHOOK: query: alter table partitioned_update_1 add columns(c int, d string) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@partitioned_update_1 +PREHOOK: Output: default@partitioned_update_1 +POSTHOOK: query: alter table partitioned_update_1 add columns(c int, d string) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@partitioned_update_1 +POSTHOOK: Output: default@partitioned_update_1 +PREHOOK: query: insert into table partitioned_update_1 partition(part=2) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 
WHERE insert_num <=110 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@partitioned_update_1@part=2 +POSTHOOK: query: insert into table partitioned_update_1 partition(part=2) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num <=110 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@partitioned_update_1@part=2 +POSTHOOK: Lineage: partitioned_update_1 PARTITION(part=2).a SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_update_1 PARTITION(part=2).b SIMPLE [] +POSTHOOK: Lineage: partitioned_update_1 PARTITION(part=2).c SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_update_1 PARTITION(part=2).d SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:string1, type:string, comment:null), ] +POSTHOOK: Lineage: partitioned_update_1 PARTITION(part=2).insert_num SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 int1 string1 +PREHOOK: query: insert into table partitioned_update_1 partition(part=1) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@partitioned_update_1@part=1 +POSTHOOK: query: insert into table partitioned_update_1 partition(part=1) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@partitioned_update_1@part=1 +POSTHOOK: Lineage: partitioned_update_1 PARTITION(part=1).a SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_update_1 PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: partitioned_update_1 PARTITION(part=1).c SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_update_1 PARTITION(part=1).d SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:string1, type:string, comment:null), ] +POSTHOOK: Lineage: partitioned_update_1 PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 int1 string1 +PREHOOK: query: select insert_num,part,a,b,c,d from partitioned_update_1 +PREHOOK: type: QUERY +PREHOOK: Input: default@partitioned_update_1 +PREHOOK: Input: default@partitioned_update_1@part=1 +PREHOOK: Input: default@partitioned_update_1@part=2 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b,c,d from partitioned_update_1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partitioned_update_1 +POSTHOOK: Input: default@partitioned_update_1@part=1 +POSTHOOK: Input: default@partitioned_update_1@part=2 +#### A masked pattern was here #### +insert_num part a b c d +101 1 -2147483648 original NULL NULL +102 1 2147483647 original NULL NULL +103 1 NULL original NULL NULL +104 1 203332 original NULL NULL +105 1 -999992 original NULL NULL +106 2 0 new 0 hangar paralysed companion +107 2 127 new 127 bottom +108 2 NULL new NULL baffling +109 2 17 new 17 feather weight +110 2 
400 new 400 window +111 1 -57923222 new -57923222 astonishing +112 1 82153733 new 82153733 Antarctic peninsula +113 1 40000 new 40000 Delaware +114 1 -128 new -128 smaller than Scotland +115 1 32768 new 32768 major glaciation +PREHOOK: query: update partitioned_update_1 set c=99 +PREHOOK: type: QUERY +PREHOOK: Input: default@partitioned_update_1 +PREHOOK: Input: default@partitioned_update_1@part=1 +PREHOOK: Input: default@partitioned_update_1@part=2 +PREHOOK: Output: default@partitioned_update_1@part=1 +PREHOOK: Output: default@partitioned_update_1@part=2 +POSTHOOK: query: update partitioned_update_1 set c=99 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partitioned_update_1 +POSTHOOK: Input: default@partitioned_update_1@part=1 +POSTHOOK: Input: default@partitioned_update_1@part=2 +POSTHOOK: Output: default@partitioned_update_1@part=1 +POSTHOOK: Output: default@partitioned_update_1@part=2 +row__id insert_num a b _c4 d part +PREHOOK: query: select insert_num,part,a,b,c,d from partitioned_update_1 +PREHOOK: type: QUERY +PREHOOK: Input: default@partitioned_update_1 +PREHOOK: Input: default@partitioned_update_1@part=1 +PREHOOK: Input: default@partitioned_update_1@part=2 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b,c,d from partitioned_update_1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partitioned_update_1 +POSTHOOK: Input: default@partitioned_update_1@part=1 +POSTHOOK: Input: default@partitioned_update_1@part=2 +#### A masked pattern was here #### +insert_num part a b c d +101 1 -2147483648 original 99 NULL +102 1 2147483647 original 99 NULL +103 1 NULL original 99 NULL +104 1 203332 original 99 NULL +105 1 -999992 original 99 NULL +106 2 0 new 99 hangar paralysed companion +107 2 127 new 99 bottom +108 2 NULL new 99 baffling +109 2 17 new 99 feather weight +110 2 400 new 99 window +111 1 -57923222 new 99 astonishing +112 1 82153733 new 99 Antarctic peninsula +113 1 40000 new 99 Delaware +114 1 -128 new 99 smaller than Scotland +115 1 32768 new 99 major glaciation +PREHOOK: query: alter table partitioned_update_1 partition(part=1) compact 'major' +PREHOOK: type: ALTERTABLE_COMPACT +POSTHOOK: query: alter table partitioned_update_1 partition(part=1) compact 'major' +POSTHOOK: type: ALTERTABLE_COMPACT +PREHOOK: query: alter table partitioned_update_1 partition(part=2) compact 'major' +PREHOOK: type: ALTERTABLE_COMPACT +POSTHOOK: query: alter table partitioned_update_1 partition(part=2) compact 'major' +POSTHOOK: type: ALTERTABLE_COMPACT +PREHOOK: query: select insert_num,part,a,b,c,d from partitioned_update_1 +PREHOOK: type: QUERY +PREHOOK: Input: default@partitioned_update_1 +PREHOOK: Input: default@partitioned_update_1@part=1 +PREHOOK: Input: default@partitioned_update_1@part=2 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b,c,d from partitioned_update_1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partitioned_update_1 +POSTHOOK: Input: default@partitioned_update_1@part=1 +POSTHOOK: Input: default@partitioned_update_1@part=2 +#### A masked pattern was here #### +insert_num part a b c d +101 1 -2147483648 original 99 NULL +102 1 2147483647 original 99 NULL +103 1 NULL original 99 NULL +104 1 203332 original 99 NULL +105 1 -999992 original 99 NULL +106 2 0 new 99 hangar paralysed companion +107 2 127 new 99 bottom +108 2 NULL new 99 baffling +109 2 17 new 99 feather weight +110 2 400 new 99 window +111 1 -57923222 new 99 astonishing +112 1 82153733 new 99 Antarctic peninsula +113 1 40000 new 99 Delaware +114 1 
-128 new 99 smaller than Scotland +115 1 32768 new 99 major glaciation +PREHOOK: query: DROP TABLE partitioned_update_1 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@partitioned_update_1 +PREHOOK: Output: default@partitioned_update_1 +POSTHOOK: query: DROP TABLE partitioned_update_1 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@partitioned_update_1 +POSTHOOK: Output: default@partitioned_update_1 +PREHOOK: query: CREATE TABLE partitioned_delete_1(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@partitioned_delete_1 +POSTHOOK: query: CREATE TABLE partitioned_delete_1(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@partitioned_delete_1 +PREHOOK: query: insert into table partitioned_delete_1 partition(part=1) SELECT insert_num, int1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@partitioned_delete_1@part=1 +POSTHOOK: query: insert into table partitioned_delete_1 partition(part=1) SELECT insert_num, int1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@partitioned_delete_1@part=1 +POSTHOOK: Lineage: partitioned_delete_1 PARTITION(part=1).a SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_delete_1 PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: partitioned_delete_1 PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 +PREHOOK: query: alter table partitioned_delete_1 add columns(c int, d string) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@partitioned_delete_1 +PREHOOK: Output: default@partitioned_delete_1 +POSTHOOK: query: alter table partitioned_delete_1 add columns(c int, d string) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@partitioned_delete_1 +POSTHOOK: Output: default@partitioned_delete_1 +PREHOOK: query: insert into table partitioned_delete_1 partition(part=2) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num <=110 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@partitioned_delete_1@part=2 +POSTHOOK: query: insert into table partitioned_delete_1 partition(part=2) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num <=110 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@partitioned_delete_1@part=2 +POSTHOOK: Lineage: partitioned_delete_1 PARTITION(part=2).a SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_delete_1 PARTITION(part=2).b SIMPLE [] +POSTHOOK: Lineage: partitioned_delete_1 PARTITION(part=2).c SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_delete_1 PARTITION(part=2).d SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:string1, 
type:string, comment:null), ] +POSTHOOK: Lineage: partitioned_delete_1 PARTITION(part=2).insert_num SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 int1 string1 +PREHOOK: query: insert into table partitioned_delete_1 partition(part=1) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@partitioned_delete_1@part=1 +POSTHOOK: query: insert into table partitioned_delete_1 partition(part=1) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@partitioned_delete_1@part=1 +POSTHOOK: Lineage: partitioned_delete_1 PARTITION(part=1).a SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_delete_1 PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: partitioned_delete_1 PARTITION(part=1).c SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_delete_1 PARTITION(part=1).d SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:string1, type:string, comment:null), ] +POSTHOOK: Lineage: partitioned_delete_1 PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 int1 string1 +PREHOOK: query: select part,a,b,c,d from partitioned_delete_1 +PREHOOK: type: QUERY +PREHOOK: Input: default@partitioned_delete_1 +PREHOOK: Input: default@partitioned_delete_1@part=1 +PREHOOK: Input: default@partitioned_delete_1@part=2 +#### A masked pattern was here #### +POSTHOOK: query: select part,a,b,c,d from partitioned_delete_1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partitioned_delete_1 +POSTHOOK: Input: default@partitioned_delete_1@part=1 +POSTHOOK: Input: default@partitioned_delete_1@part=2 +#### A masked pattern was here #### +part a b c d +1 -128 new -128 smaller than Scotland +1 -2147483648 original NULL NULL +1 -57923222 new -57923222 astonishing +1 -999992 original NULL NULL +1 203332 original NULL NULL +1 2147483647 original NULL NULL +1 32768 new 32768 major glaciation +1 40000 new 40000 Delaware +1 82153733 new 82153733 Antarctic peninsula +1 NULL original NULL NULL +2 0 new 0 hangar paralysed companion +2 127 new 127 bottom +2 17 new 17 feather weight +2 400 new 400 window +2 NULL new NULL baffling +PREHOOK: query: delete from partitioned_delete_1 where insert_num = 102 or insert_num = 104 or insert_num = 106 +PREHOOK: type: QUERY +PREHOOK: Input: default@partitioned_delete_1 +PREHOOK: Input: default@partitioned_delete_1@part=1 +PREHOOK: Input: default@partitioned_delete_1@part=2 +PREHOOK: Output: default@partitioned_delete_1@part=1 +PREHOOK: Output: default@partitioned_delete_1@part=2 +POSTHOOK: query: delete from partitioned_delete_1 where insert_num = 102 or insert_num = 104 or insert_num = 106 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partitioned_delete_1 +POSTHOOK: Input: default@partitioned_delete_1@part=1 +POSTHOOK: Input: default@partitioned_delete_1@part=2 +POSTHOOK: Output: default@partitioned_delete_1@part=1 +POSTHOOK: Output: default@partitioned_delete_1@part=2 +row__id part +PREHOOK: query: select insert_num,part,a,b,c,d from 
partitioned_delete_1 +PREHOOK: type: QUERY +PREHOOK: Input: default@partitioned_delete_1 +PREHOOK: Input: default@partitioned_delete_1@part=1 +PREHOOK: Input: default@partitioned_delete_1@part=2 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b,c,d from partitioned_delete_1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partitioned_delete_1 +POSTHOOK: Input: default@partitioned_delete_1@part=1 +POSTHOOK: Input: default@partitioned_delete_1@part=2 +#### A masked pattern was here #### +insert_num part a b c d +101 1 -2147483648 original NULL NULL +103 1 NULL original NULL NULL +105 1 -999992 original NULL NULL +107 2 127 new 127 bottom +108 2 NULL new NULL baffling +109 2 17 new 17 feather weight +110 2 400 new 400 window +111 1 -57923222 new -57923222 astonishing +112 1 82153733 new 82153733 Antarctic peninsula +113 1 40000 new 40000 Delaware +114 1 -128 new -128 smaller than Scotland +115 1 32768 new 32768 major glaciation +PREHOOK: query: alter table partitioned_delete_1 partition(part=1) compact 'major' +PREHOOK: type: ALTERTABLE_COMPACT +POSTHOOK: query: alter table partitioned_delete_1 partition(part=1) compact 'major' +POSTHOOK: type: ALTERTABLE_COMPACT +PREHOOK: query: alter table partitioned_delete_1 partition(part=2) compact 'major' +PREHOOK: type: ALTERTABLE_COMPACT +POSTHOOK: query: alter table partitioned_delete_1 partition(part=2) compact 'major' +POSTHOOK: type: ALTERTABLE_COMPACT +PREHOOK: query: select insert_num,part,a,b,c,d from partitioned_delete_1 +PREHOOK: type: QUERY +PREHOOK: Input: default@partitioned_delete_1 +PREHOOK: Input: default@partitioned_delete_1@part=1 +PREHOOK: Input: default@partitioned_delete_1@part=2 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b,c,d from partitioned_delete_1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partitioned_delete_1 +POSTHOOK: Input: default@partitioned_delete_1@part=1 +POSTHOOK: Input: default@partitioned_delete_1@part=2 +#### A masked pattern was here #### +insert_num part a b c d +101 1 -2147483648 original NULL NULL +103 1 NULL original NULL NULL +105 1 -999992 original NULL NULL +107 2 127 new 127 bottom +108 2 NULL new NULL baffling +109 2 17 new 17 feather weight +110 2 400 new 400 window +111 1 -57923222 new -57923222 astonishing +112 1 82153733 new 82153733 Antarctic peninsula +113 1 40000 new 40000 Delaware +114 1 -128 new -128 smaller than Scotland +115 1 32768 new 32768 major glaciation +PREHOOK: query: DROP TABLE partitioned_delete_1 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@partitioned_delete_1 +PREHOOK: Output: default@partitioned_delete_1 +POSTHOOK: query: DROP TABLE partitioned_delete_1 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@partitioned_delete_1 +POSTHOOK: Output: default@partitioned_delete_1 +PREHOOK: query: CREATE TABLE partitioned_delete_2(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@partitioned_delete_2 +POSTHOOK: query: CREATE TABLE partitioned_delete_2(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@partitioned_delete_2 +PREHOOK: query: insert into table partitioned_delete_2 partition(part=1) SELECT insert_num, int1, 'original' FROM 
schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@partitioned_delete_2@part=1 +POSTHOOK: query: insert into table partitioned_delete_2 partition(part=1) SELECT insert_num, int1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@partitioned_delete_2@part=1 +POSTHOOK: Lineage: partitioned_delete_2 PARTITION(part=1).a SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_delete_2 PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: partitioned_delete_2 PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 +PREHOOK: query: alter table partitioned_delete_2 add columns(c int, d string) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@partitioned_delete_2 +PREHOOK: Output: default@partitioned_delete_2 +POSTHOOK: query: alter table partitioned_delete_2 add columns(c int, d string) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@partitioned_delete_2 +POSTHOOK: Output: default@partitioned_delete_2 +PREHOOK: query: insert into table partitioned_delete_2 partition(part=2) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num <=110 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@partitioned_delete_2@part=2 +POSTHOOK: query: insert into table partitioned_delete_2 partition(part=2) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num <=110 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@partitioned_delete_2@part=2 +POSTHOOK: Lineage: partitioned_delete_2 PARTITION(part=2).a SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_delete_2 PARTITION(part=2).b SIMPLE [] +POSTHOOK: Lineage: partitioned_delete_2 PARTITION(part=2).c SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_delete_2 PARTITION(part=2).d SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:string1, type:string, comment:null), ] +POSTHOOK: Lineage: partitioned_delete_2 PARTITION(part=2).insert_num SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 int1 string1 +PREHOOK: query: insert into table partitioned_delete_2 partition(part=1) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@partitioned_delete_2@part=1 +POSTHOOK: query: insert into table partitioned_delete_2 partition(part=1) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@partitioned_delete_2@part=1 +POSTHOOK: Lineage: partitioned_delete_2 PARTITION(part=1).a SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_delete_2 PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: partitioned_delete_2 
PARTITION(part=1).c SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_delete_2 PARTITION(part=1).d SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:string1, type:string, comment:null), ] +POSTHOOK: Lineage: partitioned_delete_2 PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 int1 string1 +PREHOOK: query: select insert_num,part,a,b,c,d from partitioned_delete_2 +PREHOOK: type: QUERY +PREHOOK: Input: default@partitioned_delete_2 +PREHOOK: Input: default@partitioned_delete_2@part=1 +PREHOOK: Input: default@partitioned_delete_2@part=2 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b,c,d from partitioned_delete_2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partitioned_delete_2 +POSTHOOK: Input: default@partitioned_delete_2@part=1 +POSTHOOK: Input: default@partitioned_delete_2@part=2 +#### A masked pattern was here #### +insert_num part a b c d +101 1 -2147483648 original NULL NULL +102 1 2147483647 original NULL NULL +103 1 NULL original NULL NULL +104 1 203332 original NULL NULL +105 1 -999992 original NULL NULL +106 2 0 new 0 hangar paralysed companion +107 2 127 new 127 bottom +108 2 NULL new NULL baffling +109 2 17 new 17 feather weight +110 2 400 new 400 window +111 1 -57923222 new -57923222 astonishing +112 1 82153733 new 82153733 Antarctic peninsula +113 1 40000 new 40000 Delaware +114 1 -128 new -128 smaller than Scotland +115 1 32768 new 32768 major glaciation +PREHOOK: query: delete from partitioned_delete_2 where insert_num = 108 or insert_num > 113 +PREHOOK: type: QUERY +PREHOOK: Input: default@partitioned_delete_2 +PREHOOK: Input: default@partitioned_delete_2@part=1 +PREHOOK: Input: default@partitioned_delete_2@part=2 +PREHOOK: Output: default@partitioned_delete_2@part=1 +PREHOOK: Output: default@partitioned_delete_2@part=2 +POSTHOOK: query: delete from partitioned_delete_2 where insert_num = 108 or insert_num > 113 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partitioned_delete_2 +POSTHOOK: Input: default@partitioned_delete_2@part=1 +POSTHOOK: Input: default@partitioned_delete_2@part=2 +POSTHOOK: Output: default@partitioned_delete_2@part=1 +POSTHOOK: Output: default@partitioned_delete_2@part=2 +row__id part +PREHOOK: query: select insert_num,part,a,b,c,d from partitioned_delete_2 +PREHOOK: type: QUERY +PREHOOK: Input: default@partitioned_delete_2 +PREHOOK: Input: default@partitioned_delete_2@part=1 +PREHOOK: Input: default@partitioned_delete_2@part=2 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b,c,d from partitioned_delete_2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partitioned_delete_2 +POSTHOOK: Input: default@partitioned_delete_2@part=1 +POSTHOOK: Input: default@partitioned_delete_2@part=2 +#### A masked pattern was here #### +insert_num part a b c d +101 1 -2147483648 original NULL NULL +102 1 2147483647 original NULL NULL +103 1 NULL original NULL NULL +104 1 203332 original NULL NULL +105 1 -999992 original NULL NULL +106 2 0 new 0 hangar paralysed companion +107 2 127 new 127 bottom +109 2 17 new 17 feather weight +110 2 400 new 400 window +111 1 -57923222 new -57923222 astonishing +112 1 82153733 new 82153733 Antarctic peninsula +113 1 40000 new 40000 Delaware +PREHOOK: query: alter table partitioned_delete_2 partition(part=1) compact 'major' +PREHOOK: type: 
ALTERTABLE_COMPACT +POSTHOOK: query: alter table partitioned_delete_2 partition(part=1) compact 'major' +POSTHOOK: type: ALTERTABLE_COMPACT +PREHOOK: query: alter table partitioned_delete_2 partition(part=2) compact 'major' +PREHOOK: type: ALTERTABLE_COMPACT +POSTHOOK: query: alter table partitioned_delete_2 partition(part=2) compact 'major' +POSTHOOK: type: ALTERTABLE_COMPACT +PREHOOK: query: select insert_num,part,a,b,c,d from partitioned_delete_2 +PREHOOK: type: QUERY +PREHOOK: Input: default@partitioned_delete_2 +PREHOOK: Input: default@partitioned_delete_2@part=1 +PREHOOK: Input: default@partitioned_delete_2@part=2 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b,c,d from partitioned_delete_2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partitioned_delete_2 +POSTHOOK: Input: default@partitioned_delete_2@part=1 +POSTHOOK: Input: default@partitioned_delete_2@part=2 +#### A masked pattern was here #### +insert_num part a b c d +101 1 -2147483648 original NULL NULL +102 1 2147483647 original NULL NULL +103 1 NULL original NULL NULL +104 1 203332 original NULL NULL +105 1 -999992 original NULL NULL +106 2 0 new 0 hangar paralysed companion +107 2 127 new 127 bottom +109 2 17 new 17 feather weight +110 2 400 new 400 window +111 1 -57923222 new -57923222 astonishing +112 1 82153733 new 82153733 Antarctic peninsula +113 1 40000 new 40000 Delaware +PREHOOK: query: DROP TABLE partitioned_delete_2 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@partitioned_delete_2 +PREHOOK: Output: default@partitioned_delete_2 +POSTHOOK: query: DROP TABLE partitioned_delete_2 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@partitioned_delete_2 +POSTHOOK: Output: default@partitioned_delete_2 +PREHOOK: query: drop table if exists missing_ddl_2 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists missing_ddl_2 +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table missing_ddl_2(name string, age int) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@missing_ddl_2 +POSTHOOK: query: create table missing_ddl_2(name string, age int) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@missing_ddl_2 +PREHOOK: query: insert overwrite table missing_ddl_2 select value, key from srcbucket +PREHOOK: type: QUERY +PREHOOK: Input: default@srcbucket +PREHOOK: Output: default@missing_ddl_2 +POSTHOOK: query: insert overwrite table missing_ddl_2 select value, key from srcbucket +POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcbucket +POSTHOOK: Output: default@missing_ddl_2 +POSTHOOK: Lineage: missing_ddl_2.age SIMPLE [(srcbucket)srcbucket.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: missing_ddl_2.name SIMPLE [(srcbucket)srcbucket.FieldSchema(name:value, type:string, comment:null), ] +value key +PREHOOK: query: alter table missing_ddl_2 add columns (gps double) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@missing_ddl_2 +PREHOOK: Output: default@missing_ddl_2 +POSTHOOK: query: alter table missing_ddl_2 add columns (gps double) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@missing_ddl_2 +POSTHOOK: Output: default@missing_ddl_2 +PREHOOK: query: DROP TABLE IF EXISTS all100kjson_textfile_orc +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS all100kjson_textfile_orc +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE all100kjson_textfile_orc ( + si smallint, + i int, + b bigint, + f float, + d double, + s string, + bo boolean, 
+ ts timestamp) + PARTITIONED BY (t tinyint) + ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe' + WITH SERDEPROPERTIES ('timestamp.formats'='yyyy-MM-dd\'T\'HH:mm:ss') + STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@all100kjson_textfile_orc +POSTHOOK: query: CREATE TABLE all100kjson_textfile_orc ( + si smallint, + i int, + b bigint, + f float, + d double, + s string, + bo boolean, + ts timestamp) + PARTITIONED BY (t tinyint) + ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe' + WITH SERDEPROPERTIES ('timestamp.formats'='yyyy-MM-dd\'T\'HH:mm:ss') + STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@all100kjson_textfile_orc +PREHOOK: query: INSERT INTO TABLE all100kjson_textfile_orc PARTITION (t) SELECT csmallint, cint, cbigint, cfloat, cdouble, cstring1, cboolean1, ctimestamp1, ctinyint FROM alltypesorc WHERE ctinyint > 0 +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: default@all100kjson_textfile_orc +POSTHOOK: query: INSERT INTO TABLE all100kjson_textfile_orc PARTITION (t) SELECT csmallint, cint, cbigint, cfloat, cdouble, cstring1, cboolean1, ctimestamp1, ctinyint FROM alltypesorc WHERE ctinyint > 0 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: default@all100kjson_textfile_orc@t=1 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=10 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=11 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=12 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=13 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=14 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=15 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=16 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=17 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=18 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=19 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=2 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=20 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=21 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=22 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=23 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=24 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=25 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=26 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=27 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=28 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=29 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=3 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=30 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=31 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=32 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=33 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=34 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=35 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=36 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=37 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=38 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=39 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=4 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=40 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=41 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=42 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=43 +POSTHOOK: Output: 
default@all100kjson_textfile_orc@t=44 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=45 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=46 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=47 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=48 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=49 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=5 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=50 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=51 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=52 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=53 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=54 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=55 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=56 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=57 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=58 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=59 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=6 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=60 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=61 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=62 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=7 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=8 +POSTHOOK: Output: default@all100kjson_textfile_orc@t=9 +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=10).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=10).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=10).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=10).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=10).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=10).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=10).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=10).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=11).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=11).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=11).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=11).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=11).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=11).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=11).si SIMPLE 
[(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=11).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=12).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=12).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=12).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=12).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=12).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=12).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=12).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=12).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=13).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=13).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=13).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=13).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=13).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=13).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=13).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=13).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=14).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=14).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=14).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=14).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=14).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=14).s SIMPLE 
[(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=14).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=14).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=15).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=15).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=15).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=15).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=15).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=15).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=15).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=15).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=16).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=16).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=16).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=16).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=16).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=16).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=16).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=16).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=17).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=17).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=17).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=17).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=17).i SIMPLE 
[(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=17).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=17).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=17).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=18).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=18).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=18).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=18).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=18).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=18).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=18).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=18).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=19).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=19).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=19).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=19).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=19).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=19).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=19).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=19).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=1).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=1).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=1).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=1).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=1).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=1).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=1).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=1).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=20).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=20).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=20).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=20).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=20).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=20).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=20).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=20).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=21).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=21).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=21).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=21).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=21).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=21).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=21).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=21).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=22).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=22).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=22).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=22).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=22).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=22).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=22).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=22).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=23).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=23).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=23).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=23).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=23).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=23).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=23).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=23).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=24).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=24).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=24).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=24).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=24).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=24).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=24).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=24).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=25).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=25).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=25).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=25).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=25).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=25).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=25).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=25).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=26).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=26).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=26).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=26).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=26).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=26).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=26).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=26).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=27).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=27).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=27).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=27).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=27).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=27).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=27).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=27).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=28).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=28).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=28).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=28).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=28).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=28).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=28).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=28).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=29).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=29).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=29).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=29).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=29).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=29).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=29).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=29).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=2).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=2).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=2).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=2).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=2).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=2).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=2).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=2).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=30).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=30).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=30).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=30).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=30).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=30).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=30).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=30).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=31).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=31).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=31).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=31).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=31).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=31).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=31).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=31).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=32).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=32).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=32).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=32).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=32).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=32).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=32).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=32).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=33).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=33).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=33).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=33).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=33).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=33).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=33).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=33).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=34).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=34).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=34).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=34).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=34).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=34).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=34).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=34).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=35).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=35).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=35).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=35).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=35).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=35).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=35).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=35).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=36).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=36).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=36).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=36).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=36).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=36).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=36).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=36).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=37).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=37).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=37).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=37).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=37).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=37).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=37).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=37).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=38).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=38).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=38).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=38).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=38).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=38).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=38).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=38).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=39).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=39).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=39).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=39).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=39).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=39).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=39).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=39).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=3).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=3).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=3).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=3).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=3).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=3).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=3).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=3).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=40).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=40).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=40).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=40).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=40).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=40).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=40).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=40).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=41).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=41).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=41).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=41).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=41).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=41).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=41).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=41).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=42).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=42).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=42).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=42).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=42).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=42).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=42).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=42).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=43).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=43).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=43).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=43).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=43).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=43).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=43).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=43).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=44).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=44).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=44).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=44).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=44).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=44).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=44).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=44).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=45).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=45).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=45).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=45).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=45).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=45).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=45).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=45).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=46).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=46).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=46).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=46).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=46).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=46).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=46).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=46).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=47).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=47).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=47).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=47).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=47).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=47).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=47).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=47).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=48).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=48).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=48).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=48).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=48).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=48).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=48).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=48).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=49).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=49).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=49).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=49).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=49).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=49).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=49).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=49).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=4).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=4).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=4).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=4).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=4).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=4).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=4).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=4).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=50).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=50).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=50).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=50).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=50).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=50).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=50).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=50).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=51).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=51).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=51).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=51).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=51).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=51).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=51).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=51).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=52).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=52).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=52).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=52).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=52).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=52).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=52).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=52).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=53).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=53).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=53).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=53).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=53).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=53).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=53).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=53).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=54).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=54).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=54).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=54).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=54).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=54).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=54).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=54).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=55).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=55).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=55).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=55).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=55).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=55).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=55).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=55).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=56).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=56).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=56).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=56).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=56).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=56).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=56).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=56).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=57).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=57).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=57).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=57).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=57).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=57).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=57).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=57).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=58).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=58).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=58).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=58).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=58).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=58).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=58).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=58).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=59).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=59).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=59).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=59).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=59).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=59).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=59).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=59).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=5).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=5).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=5).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=5).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=5).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=5).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=5).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=5).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=60).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=60).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=60).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=60).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=60).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=60).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=60).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=60).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=61).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=61).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=61).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=61).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=61).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=61).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=61).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=61).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=62).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=62).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=62).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=62).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=62).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=62).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=62).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=62).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=6).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=6).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=6).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=6).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=6).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=6).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=6).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=6).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=7).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=7).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=7).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=7).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=7).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=7).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=7).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=7).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=8).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=8).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=8).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=8).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=8).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=8).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=8).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=8).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=9).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=9).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=9).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=9).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=9).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=9).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=9).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=9).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+csmallint cint cbigint cfloat cdouble cstring1 cboolean1 ctimestamp1 ctinyint
+PREHOOK: query: ALTER TABLE all100kjson_textfile_orc
+ SET FILEFORMAT
+ INPUTFORMAT 'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'
+ OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
+ SERDE 'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
+PREHOOK: type: ALTERTABLE_FILEFORMAT
+PREHOOK: Input: default@all100kjson_textfile_orc
+PREHOOK: Output: default@all100kjson_textfile_orc
+POSTHOOK: query: ALTER TABLE all100kjson_textfile_orc
+ SET FILEFORMAT
+ INPUTFORMAT 'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'
+ OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
+ SERDE 'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
+POSTHOOK: type: ALTERTABLE_FILEFORMAT
+POSTHOOK: Input: default@all100kjson_textfile_orc
+POSTHOOK: Output: default@all100kjson_textfile_orc
+PREHOOK: query: INSERT INTO TABLE all100kjson_textfile_orc PARTITION (t) SELECT csmallint, cint, cbigint, cfloat, cdouble, cstring1, cboolean1, ctimestamp1, ctinyint FROM alltypesorc WHERE ctinyint < 1 and ctinyint > -50
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+PREHOOK: Output: default@all100kjson_textfile_orc
+POSTHOOK: query: INSERT INTO TABLE all100kjson_textfile_orc PARTITION (t) SELECT csmallint, cint, cbigint, cfloat, cdouble, cstring1, cboolean1, ctimestamp1, ctinyint FROM alltypesorc WHERE ctinyint < 1 and ctinyint > -50
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-1
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-10
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-11
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-12
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-13
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-14
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-15
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-16
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-17
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-18
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-19
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-2
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-20
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-21
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-22
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-23
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-24
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-25
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-26
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-27
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-28
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-29
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-3
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-30
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-31
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-32
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-33
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-34
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-35
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-36
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-37
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-38
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-39
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-4
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-40
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-41
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-42
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-43
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-44
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-45
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-46
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-47
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-48
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-49
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-5
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-6
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-7
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-8
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=-9
+POSTHOOK: Output: default@all100kjson_textfile_orc@t=0
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-10).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-10).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-10).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-10).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-10).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-10).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-10).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-10).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-11).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-11).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-11).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-11).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-11).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-11).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-11).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-11).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-12).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-12).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-12).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK:
Lineage: all100kjson_textfile_orc PARTITION(t=-12).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-12).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-12).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-12).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-12).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-13).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-13).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-13).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-13).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-13).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-13).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-13).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-13).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-14).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-14).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-14).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-14).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-14).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-14).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-14).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-14).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-15).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-15).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: 
Lineage: all100kjson_textfile_orc PARTITION(t=-15).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-15).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-15).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-15).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-15).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-15).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-16).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-16).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-16).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-16).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-16).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-16).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-16).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-16).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-17).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-17).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-17).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-17).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-17).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-17).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-17).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-17).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-18).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: 
all100kjson_textfile_orc PARTITION(t=-18).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-18).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-18).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-18).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-18).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-18).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-18).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-19).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-19).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-19).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-19).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-19).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-19).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-19).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-19).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-1).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-1).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-1).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-1).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-1).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-1).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-1).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-1).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: 
all100kjson_textfile_orc PARTITION(t=-20).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-20).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-20).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-20).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-20).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-20).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-20).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-20).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-21).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-21).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-21).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-21).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-21).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-21).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-21).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-21).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-22).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-22).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-22).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-22).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-22).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-22).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-22).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: 
all100kjson_textfile_orc PARTITION(t=-22).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-23).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-23).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-23).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-23).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-23).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-23).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-23).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-23).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-24).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-24).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-24).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-24).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-24).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-24).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-24).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-24).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-25).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-25).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-25).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-25).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-25).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-25).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: 
all100kjson_textfile_orc PARTITION(t=-25).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-25).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-26).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-26).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-26).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-26).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-26).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-26).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-26).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-26).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-27).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-27).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-27).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-27).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-27).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-27).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-27).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-27).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-28).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-28).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-28).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-28).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-28).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: 
all100kjson_textfile_orc PARTITION(t=-28).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-28).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-28).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-29).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-29).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-29).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-29).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-29).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-29).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-29).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-29).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-2).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-2).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-2).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-2).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-2).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-2).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-2).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-2).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-30).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-30).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-30).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-30).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: 
all100kjson_textfile_orc PARTITION(t=-30).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-30).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-30).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-30).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-31).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-31).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-31).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-31).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-31).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-31).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-31).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-31).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-32).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-32).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-32).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-32).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-32).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-32).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-32).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-32).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-33).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-33).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-33).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: 
all100kjson_textfile_orc PARTITION(t=-33).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-33).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-33).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-33).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-33).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-34).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-34).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-34).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-34).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-34).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-34).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-34).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-34).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-35).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-35).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-35).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-35).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-35).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-35).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-35).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-35).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-36).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-36).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: 
all100kjson_textfile_orc PARTITION(t=-36).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-36).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-36).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-36).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-36).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-36).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-37).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-37).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-37).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-37).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-37).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-37).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-37).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-37).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-38).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-38).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-38).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-38).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-38).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-38).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-38).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-38).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-39).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: 
all100kjson_textfile_orc PARTITION(t=-39).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-39).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-39).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-39).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-39).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-39).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-39).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-3).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-3).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-3).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-3).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-3).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-3).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-3).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-3).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-40).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-40).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-40).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-40).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-40).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-40).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-40).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-40).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: 
all100kjson_textfile_orc PARTITION(t=-41).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-41).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-41).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-41).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-41).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-41).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-41).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-41).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-42).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-42).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-42).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-42).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-42).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-42).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-42).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-42).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-43).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-43).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-43).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-43).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-43).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-43).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-43).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: 
all100kjson_textfile_orc PARTITION(t=-43).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-44).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-44).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-44).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-44).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-44).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-44).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-44).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-44).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-45).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-45).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-45).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-45).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-45).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-45).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-45).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-45).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-46).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-46).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-46).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-46).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-46).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-46).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: 
all100kjson_textfile_orc PARTITION(t=-46).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-46).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-47).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-47).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-47).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-47).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-47).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-47).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-47).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-47).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-48).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-48).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-48).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-48).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-48).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-48).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-48).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-48).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-49).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-49).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-49).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-49).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-49).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: 
all100kjson_textfile_orc PARTITION(t=-49).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-49).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-49).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-4).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-4).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-4).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-4).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-4).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-4).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-4).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-4).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-5).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-5).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-5).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-5).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-5).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-5).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-5).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-5).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-6).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-6).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-6).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] +POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-6).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +POSTHOOK: Lineage: 
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-6).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-6).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-6).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-6).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-7).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-7).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-7).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-7).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-7).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-7).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-7).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-7).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-8).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-8).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-8).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-8).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-8).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-8).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-8).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-8).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-9).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-9).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-9).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-9).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-9).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-9).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-9).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=-9).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=0).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=0).bo SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=0).d SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=0).f SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=0).i SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=0).s SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=0).si SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: all100kjson_textfile_orc PARTITION(t=0).ts SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+csmallint cint cbigint cfloat cdouble cstring1 cboolean1 ctimestamp1 ctinyint
+PREHOOK: query: DROP TABLE IF EXISTS emptyavro
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS emptyavro
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE emptyavro (i int)
+ PARTITIONED BY (s string)
+ STORED AS AVRO
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@emptyavro
+POSTHOOK: query: CREATE TABLE emptyavro (i int)
+ PARTITIONED BY (s string)
+ STORED AS AVRO
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@emptyavro
+PREHOOK: query: load data local inpath '../../data/files/empty1.txt' into table emptyavro PARTITION (s='something')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@emptyavro
+POSTHOOK: query: load data local inpath '../../data/files/empty1.txt' into table emptyavro PARTITION (s='something')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@emptyavro
+POSTHOOK: Output: default@emptyavro@s=something
+PREHOOK: query: SELECT COUNT(*) from emptyavro
+PREHOOK: type: QUERY
+PREHOOK: Input: default@emptyavro
+PREHOOK: Input: default@emptyavro@s=something
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT COUNT(*) from emptyavro
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@emptyavro
+POSTHOOK: Input: default@emptyavro@s=something
+#### A masked pattern was here ####
+_c0
+0
diff --git ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_table_llap_io.q.out ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_table_llap_io.q.out
new file mode 100644
index 0000000..45dd680
--- /dev/null
+++ ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_table_llap_io.q.out
@@ -0,0 +1,1077 @@
+PREHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string)
+row format delimited fields terminated by '|' stored as textfile
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@schema_evolution_data
+POSTHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string)
+row format delimited fields terminated by '|' stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@schema_evolution_data
+PREHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@schema_evolution_data
+POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@schema_evolution_data
+PREHOOK: query: CREATE TABLE table_add_int_permute_select(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table_add_int_permute_select
+POSTHOOK: query: CREATE TABLE table_add_int_permute_select(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table_add_int_permute_select
+PREHOOK: query: insert into table table_add_int_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data
+PREHOOK: type: QUERY
+PREHOOK: Input: default@schema_evolution_data
+PREHOOK: Output: default@table_add_int_permute_select
+POSTHOOK: query: insert into table table_add_int_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@schema_evolution_data
+POSTHOOK: Output: default@table_add_int_permute_select
+POSTHOOK: Lineage: table_add_int_permute_select.a SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: table_add_int_permute_select.b SIMPLE []
+POSTHOOK: Lineage: table_add_int_permute_select.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ]
+insert_num int1 _c2
+PREHOOK: query: alter table table_add_int_permute_select add columns(c int)
+PREHOOK: type: ALTERTABLE_ADDCOLS
+PREHOOK: Input: default@table_add_int_permute_select
+PREHOOK: Output: default@table_add_int_permute_select
+POSTHOOK: query: alter table table_add_int_permute_select add columns(c int)
+POSTHOOK: type: ALTERTABLE_ADDCOLS
+POSTHOOK: Input: default@table_add_int_permute_select
+POSTHOOK: Output: default@table_add_int_permute_select
+PREHOOK: query: insert into table table_add_int_permute_select VALUES (111, 80000, 'new', 80000)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@table_add_int_permute_select
+POSTHOOK: query: insert into table table_add_int_permute_select VALUES (111, 80000, 'new', 80000)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@table_add_int_permute_select
+POSTHOOK: Lineage: table_add_int_permute_select.a SCRIPT []
+POSTHOOK: Lineage: table_add_int_permute_select.b SCRIPT []
+POSTHOOK: Lineage: table_add_int_permute_select.c SCRIPT []
+POSTHOOK: Lineage: table_add_int_permute_select.insert_num SCRIPT []
+col1 col2 col3 col4
+PREHOOK: query: explain vectorization only detail
+select insert_num,a,b,c from table_add_int_permute_select
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization only detail
+select insert_num,a,b,c from table_add_int_permute_select
+POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+  enabled: false
+  enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false]
+
+PREHOOK: query: select insert_num,a,b from table_add_int_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_add_int_permute_select
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,a,b from table_add_int_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_add_int_permute_select
+#### A masked pattern was here ####
+insert_num a b
+101 -2147483648 original
+102 2147483647 original
+103 NULL original
+104 203332 original
+105 -999992 original
+111 80000 new
+PREHOOK: query: select insert_num,a,b,c from table_add_int_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_add_int_permute_select
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,a,b,c from table_add_int_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_add_int_permute_select
+#### A masked pattern was here ####
+insert_num a b c
+101 -2147483648 original NULL
+102 2147483647 original NULL
+103 NULL original NULL
+104 203332 original NULL
+105 -999992 original NULL
+111 80000 new 80000
+PREHOOK: query: select insert_num,c from table_add_int_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_add_int_permute_select
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,c from table_add_int_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_add_int_permute_select
+#### A masked pattern was here ####
+insert_num c
+101 NULL
+102 NULL
+103 NULL
+104 NULL
+105 NULL
+111 80000
+PREHOOK: query: drop table table_add_int_permute_select
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@table_add_int_permute_select
+PREHOOK: Output: default@table_add_int_permute_select
+POSTHOOK: query: drop table table_add_int_permute_select
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@table_add_int_permute_select
+POSTHOOK: Output: default@table_add_int_permute_select
+PREHOOK: query: CREATE TABLE table_add_int_string_permute_select(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table_add_int_string_permute_select
+POSTHOOK: query: CREATE TABLE table_add_int_string_permute_select(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table_add_int_string_permute_select
+PREHOOK: query: insert into table table_add_int_string_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data
+PREHOOK: type: QUERY
+PREHOOK: Input: default@schema_evolution_data
+PREHOOK: Output: default@table_add_int_string_permute_select
+POSTHOOK: query: insert into table table_add_int_string_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@schema_evolution_data
+POSTHOOK: Output: default@table_add_int_string_permute_select
+POSTHOOK: Lineage: table_add_int_string_permute_select.a SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: table_add_int_string_permute_select.b SIMPLE []
+POSTHOOK: Lineage: table_add_int_string_permute_select.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ]
+insert_num int1 _c2
+PREHOOK: query: alter table table_add_int_string_permute_select add columns(c int, d string)
+PREHOOK: type: ALTERTABLE_ADDCOLS
+PREHOOK: Input: default@table_add_int_string_permute_select
+PREHOOK: Output: default@table_add_int_string_permute_select
+POSTHOOK: query: alter table table_add_int_string_permute_select add columns(c int, d string)
+POSTHOOK: type: ALTERTABLE_ADDCOLS
+POSTHOOK: Input: default@table_add_int_string_permute_select
+POSTHOOK: Output: default@table_add_int_string_permute_select
+PREHOOK: query: insert into table table_add_int_string_permute_select VALUES (111, 80000, 'new', 80000, 'filler')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@table_add_int_string_permute_select
+POSTHOOK: query: insert into table table_add_int_string_permute_select VALUES (111, 80000, 'new', 80000, 'filler')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@table_add_int_string_permute_select
+POSTHOOK: Lineage: table_add_int_string_permute_select.a SCRIPT []
+POSTHOOK: Lineage: table_add_int_string_permute_select.b SCRIPT []
+POSTHOOK: Lineage: table_add_int_string_permute_select.c SCRIPT []
+POSTHOOK: Lineage: table_add_int_string_permute_select.d SCRIPT []
+POSTHOOK: Lineage: table_add_int_string_permute_select.insert_num SCRIPT []
+col1 col2 col3 col4 col5
+PREHOOK: query: select insert_num,a,b from table_add_int_string_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,a,b from table_add_int_string_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+insert_num a b
+101 -2147483648 original
+102 2147483647 original
+103 NULL original
+104 203332 original
+105 -999992 original
+111 80000 new
+PREHOOK: query: select insert_num,a,b,c from table_add_int_string_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,a,b,c from table_add_int_string_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+insert_num a b c
+101 -2147483648 original NULL
+102 2147483647 original NULL
+103 NULL original NULL
+104 203332 original NULL
+105 -999992 original NULL
+111 80000 new 80000
+PREHOOK: query: select insert_num,a,b,c,d from table_add_int_string_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,a,b,c,d from table_add_int_string_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+insert_num a b c d
+101 -2147483648 original NULL NULL
+102 2147483647 original NULL NULL
+103 NULL original NULL NULL
+104 203332 original NULL NULL
+105 -999992 original NULL NULL
+111 80000 new 80000 filler
+PREHOOK: query: select insert_num,a,c,d from table_add_int_string_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,a,c,d from table_add_int_string_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+insert_num a c d
+101 -2147483648 NULL NULL
+102 2147483647 NULL NULL
+103 NULL NULL NULL
+104 203332 NULL NULL
+105 -999992 NULL NULL
+111 80000 80000 filler
+PREHOOK: query: select insert_num,a,d from table_add_int_string_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,a,d from table_add_int_string_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+insert_num a d
+101 -2147483648 NULL
+102 2147483647 NULL
+103 NULL NULL
+104 203332 NULL
+105 -999992 NULL
+111 80000 filler
+PREHOOK: query: select insert_num,c from table_add_int_string_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,c from table_add_int_string_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+insert_num c
+101 NULL
+102 NULL
+103 NULL
+104 NULL
+105 NULL
+111 80000
+PREHOOK: query: select insert_num,d from table_add_int_string_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,d from table_add_int_string_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+insert_num d
+101 NULL
+102 NULL
+103 NULL
+104 NULL
+105 NULL
+111 filler
+PREHOOK: query: drop table table_add_int_string_permute_select
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@table_add_int_string_permute_select
+PREHOOK: Output: default@table_add_int_string_permute_select
+POSTHOOK: query: drop table table_add_int_string_permute_select
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@table_add_int_string_permute_select
+POSTHOOK: Output: default@table_add_int_string_permute_select
+PREHOOK: query: CREATE TABLE table_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table_change_string_group_double
+POSTHOOK: query: CREATE TABLE table_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table_change_string_group_double
+PREHOOK: query: insert into table table_change_string_group_double SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data
+PREHOOK: type: QUERY
+PREHOOK: Input: default@schema_evolution_data
+PREHOOK: Output: default@table_change_string_group_double
+POSTHOOK: query: insert into table table_change_string_group_double SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@schema_evolution_data
+POSTHOOK: Output: default@table_change_string_group_double
+POSTHOOK: Lineage: table_change_string_group_double.b SIMPLE []
+POSTHOOK: Lineage: table_change_string_group_double.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ]
+POSTHOOK: Lineage: table_change_string_group_double.c2 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ]
+POSTHOOK: Lineage: table_change_string_group_double.c3 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ]
+POSTHOOK: Lineage: table_change_string_group_double.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ]
+_col0 _col1 _col2 _col3 _col4
+PREHOOK: query: alter table table_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@table_change_string_group_double
+PREHOOK: Output: default@table_change_string_group_double
+POSTHOOK: query: alter table table_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@table_change_string_group_double
+POSTHOOK: Output: default@table_change_string_group_double
+PREHOOK: query: insert into table table_change_string_group_double VALUES (111, 789.321, 789.321, 789.321, 'new')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@table_change_string_group_double
+POSTHOOK: query: insert into table table_change_string_group_double VALUES (111, 789.321, 789.321, 789.321, 'new')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@table_change_string_group_double
+POSTHOOK: Lineage: table_change_string_group_double.b SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_double.c1 SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_double.c2 SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_double.c3 SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_double.insert_num SCRIPT []
+_col0 _col1 _col2 _col3 _col4
+PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_double
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_change_string_group_double
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_double
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_change_string_group_double
+#### A masked pattern was here ####
+insert_num c1 c2 c3 b
+101 1.7976931348623157E308 1.7976931348623157E308 1.7976931348623157E308 original
+102 -1.7976931348623157E308 -1.7976931348623157E308 -1.7976931348623157E308 original
+103 NULL NULL NULL original
+104 30.774 30.774 30.774 original
+105 46114.28 46114.28 46114.28 original
+111 789.321 789.321 789.321 new
+PREHOOK: query: drop table table_change_string_group_double
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@table_change_string_group_double
+PREHOOK: Output: default@table_change_string_group_double
+POSTHOOK: query: drop table table_change_string_group_double
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@table_change_string_group_double
+POSTHOOK: Output: default@table_change_string_group_double
+PREHOOK: query: CREATE TABLE table_change_date_group_string_group_date_group(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table_change_date_group_string_group_date_group
+POSTHOOK: query: CREATE TABLE table_change_date_group_string_group_date_group(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table_change_date_group_string_group_date_group
+PREHOOK: query: insert into table table_change_date_group_string_group_date_group SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data
+PREHOOK: type: QUERY
+PREHOOK: Input: default@schema_evolution_data
+PREHOOK: Output: default@table_change_date_group_string_group_date_group
+POSTHOOK: query: insert into table table_change_date_group_string_group_date_group SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@schema_evolution_data
+POSTHOOK: Output: default@table_change_date_group_string_group_date_group
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.b SIMPLE []
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ]
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ]
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ]
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ]
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ]
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ]
+insert_num date1 date1 date1 date1 date1 timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 _c11
+PREHOOK: query: alter table table_change_date_group_string_group_date_group replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@table_change_date_group_string_group_date_group
+PREHOOK: Output: default@table_change_date_group_string_group_date_group
+POSTHOOK: query: alter table table_change_date_group_string_group_date_group replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@table_change_date_group_string_group_date_group
+POSTHOOK: Output: default@table_change_date_group_string_group_date_group
+PREHOOK: query: insert into table table_change_date_group_string_group_date_group VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@table_change_date_group_string_group_date_group
+POSTHOOK: query: insert into table table_change_date_group_string_group_date_group VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@table_change_date_group_string_group_date_group
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.b SCRIPT []
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c1 SCRIPT []
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c10 SCRIPT []
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c2 SCRIPT []
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c3 SCRIPT []
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c4 SCRIPT []
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c5 SCRIPT []
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c6 SCRIPT []
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c7 SCRIPT []
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c8 SCRIPT []
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c9 SCRIPT []
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.insert_num SCRIPT []
+_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11
+PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_date_group_string_group_date_group
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_change_date_group_string_group_date_group
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_date_group_string_group_date_group
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_change_date_group_string_group_date_group
+#### A masked pattern was here ####
+insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b
+101 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.970117179 6229-06-28 02:5 6229-06-28 02:54:28.970117179 6229-06-28 02:5 original
+102 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:3 5966-07-09 03:30:50.597 5966-07-09 03:3 original
+103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
+104 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-02 06:34:14.0 1978-08-02 06:34:14.0 1978-08-02 06:3 1978-08-02 06:34:14.0 1978-08-02 06:3 original
+105 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036854 1991-01-06 16:2 1991-01-06 16:20:39.72036854 1991-01-06 16:2 original
+111 filler filler filler filler filler filler filler filler filler filler new
+PREHOOK: query: drop table table_change_date_group_string_group_date_group
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@table_change_date_group_string_group_date_group
+PREHOOK: Output: default@table_change_date_group_string_group_date_group
+POSTHOOK: query: drop table table_change_date_group_string_group_date_group
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@table_change_date_group_string_group_date_group
+POSTHOOK: Output: default@table_change_date_group_string_group_date_group
+PREHOOK: query: CREATE TABLE table_change_numeric_group_string_group_multi_ints_string_group(insert_num int,
+ c1 tinyint, c2 smallint, c3 int, c4 bigint,
+ c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint,
+ c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint,
+ b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: query: CREATE TABLE table_change_numeric_group_string_group_multi_ints_string_group(insert_num int,
+ c1 tinyint, c2 smallint, c3 int, c4 bigint,
+ c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint,
+ c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint,
+ b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: query: insert into table table_change_numeric_group_string_group_multi_ints_string_group SELECT insert_num,
+ tinyint1, smallint1, int1, bigint1,
+ tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1,
+ tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1,
+ 'original' FROM schema_evolution_data
+PREHOOK: type: QUERY
+PREHOOK: Input: default@schema_evolution_data
+PREHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: query: insert into table table_change_numeric_group_string_group_multi_ints_string_group SELECT insert_num,
+ tinyint1, smallint1, int1, bigint1,
+ tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1,
+ tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1,
+ 'original' FROM schema_evolution_data
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@schema_evolution_data
+POSTHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.b SIMPLE []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c16 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c17 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c18 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c19 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c20 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ]
+insert_num tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 _c21
+PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group
+#### A masked pattern was here ####
+insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 b
+101 -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL original
+102 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 original
+103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
+104 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 203332 888888857923222 original
+105 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 original
+PREHOOK: query: alter table table_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int,
+ c1 STRING, c2 STRING, c3 STRING, c4 STRING,
+ c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5),
+ c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5),
+ b STRING)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: query: alter table table_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int,
+ c1 STRING, c2 STRING, c3 STRING, c4 STRING,
+ c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5),
+ c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5),
+ b STRING)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: query: insert into table table_change_numeric_group_string_group_multi_ints_string_group VALUES (111,
+ 'filler', 'filler', 'filler', 'filler',
+ 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler',
+ 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler',
+ 'new')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: query: insert into table table_change_numeric_group_string_group_multi_ints_string_group VALUES (111,
+ 'filler', 'filler', 'filler', 'filler',
+ 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler',
+ 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler',
+ 'new')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.b SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c1 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c10 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c11 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c12 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c13 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c14 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c15 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c16 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c17 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c18 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c19 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c2 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c20 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c3 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c4 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c5 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c6 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c7 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c8 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c9 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.insert_num SCRIPT []
+_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 _col17 _col18 _col19 _col20 _col21
+PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group
+#### A masked pattern was here ####
+insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 b
+101 -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147 NULL -128 NULL -2147483648 NULL -128 NULL -2147 NULL original
+102 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 21474 92233 127 32767 2147483647 9223372036854775807 127 32767 21474 92233 original
+103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
+104 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 20333 88888 23 834 203332 888888857923222 23 834 20333 88888 original
+105 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -2830 -9999 -2222 -99 -28300 -999992 -222282153733 -99 -2830 -9999 -2222 original
+111 filler filler filler filler filler filler filler filler fille fille fille fille filler filler filler filler fille fille fille fille new
+PREHOOK: query: drop table table_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: query: drop table table_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: query: CREATE TABLE table_change_numeric_group_string_group_floating_string_group(insert_num int,
+ c1 decimal(38,18), c2 float, c3 double,
+ c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double,
+ c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double,
+ b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group
+POSTHOOK: query: CREATE TABLE table_change_numeric_group_string_group_floating_string_group(insert_num int,
+ c1 decimal(38,18), c2 float, c3 double,
+ c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double,
+ c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double,
+ b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group
+PREHOOK: query: insert into table table_change_numeric_group_string_group_floating_string_group SELECT insert_num,
+ decimal1, float1, double1,
+ decimal1, float1, double1, decimal1, float1, double1,
+ decimal1, float1, double1, decimal1, float1, double1,
+ 'original' FROM schema_evolution_data
+PREHOOK: type: QUERY
+PREHOOK: Input: default@schema_evolution_data
+PREHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group
+POSTHOOK: query: insert into table table_change_numeric_group_string_group_floating_string_group SELECT insert_num,
+ decimal1, float1, double1,
+ decimal1, float1, double1, decimal1, float1, double1,
+ decimal1, float1, double1, decimal1, float1, double1,
+ 'original' FROM schema_evolution_data
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@schema_evolution_data
+POSTHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.b SIMPLE []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ]
+insert_num decimal1 float1 double1 decimal1 float1 double1 decimal1 float1 double1 decimal1 float1 double1 decimal1 float1 double1 _c16
+PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group
+#### A masked pattern was here ####
+insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b
+101 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 original
+102 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 original
+103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
+104 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 original
+105 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 original
+PREHOOK: query: alter table table_change_numeric_group_string_group_floating_string_group replace columns (insert_num int,
+ c1 STRING, c2 STRING, c3 STRING,
+ c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7),
+ c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7),
+ b STRING)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group
+PREHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group
+POSTHOOK: query: alter table table_change_numeric_group_string_group_floating_string_group replace columns (insert_num int,
+ c1 STRING, c2 STRING, c3 STRING,
+ c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7),
+ c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7),
+ b STRING)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group
+POSTHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group
+PREHOOK: query: insert into table table_change_numeric_group_string_group_floating_string_group VALUES (111,
+ 'filler', 'filler', 'filler',
+ 'filler', 'filler', 'filler', 'filler', 'filler', 'filler',
+ 'filler', 'filler', 'filler', 'filler', 'filler', 'filler',
+ 'new')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group
+POSTHOOK: query: insert into table table_change_numeric_group_string_group_floating_string_group VALUES (111,
+ 'filler', 'filler', 'filler',
+ 'filler', 'filler', 'filler', 'filler', 'filler', 'filler',
+ 'filler', 'filler', 'filler', 'filler', 'filler', 'filler',
+ 'new')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.b SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c1 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c10 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c11 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c12 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c13 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c14 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c15 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c2 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c3 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c4 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c5 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c6 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c7 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c8 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c9 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.insert_num SCRIPT []
+_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16
+PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group
+#### A masked pattern was here ####
+insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b
+101 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 original
+102 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 original
+103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
+104 66475.561431 -100.35978 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 original
+105 9250340.75 NULL 46114.28 9250340.75 NULL 46114.28 9250340 NULL 46114.2 9250340.75 NULL 46114.28 9250340 NULL 46114.2 original
+111 filler filler filler filler filler filler filler filler filler filler filler filler filler filler filler new
+PREHOOK: query: drop table table_change_numeric_group_string_group_floating_string_group
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group
+PREHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group
+POSTHOOK: query: drop table table_change_numeric_group_string_group_floating_string_group
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group
+POSTHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group
+PREHOOK: query: CREATE TABLE table_change_string_group_string_group_string(insert_num int,
+ c1 string, c2 string, c3 string, c4 string,
+ c5 CHAR(50), c6 CHAR(50), c7 CHAR(50),
+ c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table_change_string_group_string_group_string
+POSTHOOK: query: CREATE TABLE table_change_string_group_string_group_string(insert_num int,
+ c1 string, c2 string, c3 string, c4 string,
+ c5 CHAR(50), c6 CHAR(50), c7 CHAR(50),
+ c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table_change_string_group_string_group_string
+PREHOOK:
query: insert into table table_change_string_group_string_group_string SELECT insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table_change_string_group_string_group_string +POSTHOOK: query: insert into table table_change_string_group_string_group_string SELECT insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_change_string_group_string_group_string +POSTHOOK: Lineage: table_change_string_group_string_group_string.b SIMPLE [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c10 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c5 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c6 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c7 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c8 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c9 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 +PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_string_group_string_group_string +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_string_group_string_group_string +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_string_group_string_group_string +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_string_group_string_group_string +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 b +101 freckled freckled freckled freckled original +102 ox ox ox ox original +103 original +104 I cooked I cooked I cooked I cooked original +105 200 200 200 200 original 
+PREHOOK: query: alter table table_change_string_group_string_group_string replace columns (insert_num int, + c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9), + c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING, + c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@table_change_string_group_string_group_string +PREHOOK: Output: default@table_change_string_group_string_group_string +POSTHOOK: query: alter table table_change_string_group_string_group_string replace columns (insert_num int, + c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9), + c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING, + c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@table_change_string_group_string_group_string +POSTHOOK: Output: default@table_change_string_group_string_group_string +PREHOOK: query: insert into table table_change_string_group_string_group_string VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@table_change_string_group_string_group_string +POSTHOOK: query: insert into table table_change_string_group_string_group_string VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@table_change_string_group_string_group_string +POSTHOOK: Lineage: table_change_string_group_string_group_string.b SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c1 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c10 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c2 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c3 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c4 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c5 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c6 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c7 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c8 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c9 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 +PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_string_group_string_group_string +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_string_group_string_group_string +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_string_group_string_group_string +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_string_group_string_group_string +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b +101 freckled freckled freckled freckled freckled freckled freckled freckled freckled freckled original +102 ox ox ox ox ox ox ox ox ox ox original +103 original +104 I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked original +105 200 200 200 200 200 200 200 200 200 200 original +111 filler filler filler filler filler filler filler filler filler filler 
new +PREHOOK: query: drop table table_change_string_group_string_group_string +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table_change_string_group_string_group_string +PREHOOK: Output: default@table_change_string_group_string_group_string +POSTHOOK: query: drop table table_change_string_group_string_group_string +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table_change_string_group_string_group_string +POSTHOOK: Output: default@table_change_string_group_string_group_string +PREHOOK: query: CREATE TABLE table_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int, + c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, + c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint, + c12 int, c13 int, c14 int, c15 int, + c16 bigint, c17 bigint, c18 bigint, + b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: CREATE TABLE table_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int, + c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, + c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint, + c12 int, c13 int, c14 int, c15 int, + c16 bigint, c17 bigint, c18 bigint, + b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: query: insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.b SIMPLE [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: 
table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c16 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c17 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c18 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 smallint1 smallint1 smallint1 smallint1 smallint1 int1 int1 int1 int1 bigint1 bigint1 bigint1 _c19 +PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: 
QUERY +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 b +101 -128 -128 -128 -128 -128 -128 NULL NULL NULL NULL NULL -2147483648 -2147483648 -2147483648 -2147483648 NULL NULL NULL original +102 127 127 127 127 127 127 32767 32767 32767 32767 32767 2147483647 2147483647 2147483647 2147483647 9223372036854775807 9223372036854775807 9223372036854775807 original +103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 23 23 23 23 23 23 834 834 834 834 834 203332 203332 203332 203332 888888857923222 888888857923222 888888857923222 original +105 -99 -99 -99 -99 -99 -99 -28300 -28300 -28300 -28300 -28300 -999992 -999992 -999992 -999992 -222282153733 -222282153733 -222282153733 original +PREHOOK: query: alter table table_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: alter table table_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: query: insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.b SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c1 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c10 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c11 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c12 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c13 SCRIPT [] +POSTHOOK: 
Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c14 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c15 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c16 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c17 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c18 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c2 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c3 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c4 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c5 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c6 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c7 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c8 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c9 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 _col17 _col18 _col19 +PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 b +101 -128 -128 -128 -128.000000000000000000 -128.0 -128.0 NULL NULL NULL NULL NULL -2147483648 -2147483648.000000000000000000 -2.14748365E9 -2.147483648E9 NULL NULL NULL original +102 127 127 127 127.000000000000000000 127.0 127.0 32767 32767 32767.000000000000000000 32767.0 32767.0 2147483647 2147483647.000000000000000000 2.14748365E9 2.147483647E9 9223372036854775807.000000000000000000 9.223372E18 9.223372036854776E18 original +103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 23 23 23 23.000000000000000000 23.0 23.0 834 834 834.000000000000000000 834.0 834.0 203332 203332.000000000000000000 203332.0 203332.0 888888857923222.000000000000000000 8.8888885E14 8.88888857923222E14 original +105 -99 -99 -99 -99.000000000000000000 -99.0 -99.0 -28300 -28300 -28300.000000000000000000 -28300.0 -28300.0 -999992 -999992.000000000000000000 -999992.0 -999992.0 -222282153733.000000000000000000 -2.22282154E11 -2.22282153733E11 original +111 7000 80000 90000000 1234.567800000000000000 9876.543 789.321 80000 90000000 1234.567800000000000000 9876.543 789.321 90000000 1234.567800000000000000 9876.543 789.321 1234.567800000000000000 9876.543 789.321 new +PREHOOK: query: drop table table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: DROPTABLE +PREHOOK: Input: 
default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: drop table table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: query: CREATE TABLE table_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: CREATE TABLE table_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: query: insert into table table_change_lower_to_higher_numeric_group_decimal_to_float SELECT insert_num, + decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: insert into table table_change_lower_to_higher_numeric_group_decimal_to_float SELECT insert_num, + decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.b SIMPLE [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num decimal1 decimal1 float1 _c4 +PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +#### A masked pattern was here #### +insert_num c1 c2 c3 b +101 99999999999999999999.999999999999999999 99999999999999999999.999999999999999999 Infinity original +102 
-99999999999999999999.999999999999999999 -99999999999999999999.999999999999999999 -Infinity original +103 NULL NULL NULL original +104 66475.561431000000000000 66475.561431000000000000 -100.35978 original +105 9250340.750000000000000000 9250340.750000000000000000 NULL original +PREHOOK: query: alter table table_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: alter table table_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: query: insert into table table_change_lower_to_higher_numeric_group_decimal_to_float VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: insert into table table_change_lower_to_higher_numeric_group_decimal_to_float VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.b SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c1 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c2 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c3 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 +PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +#### A masked pattern was here #### +insert_num c1 c2 c3 b +101 1.0E20 1.0E20 Infinity original +102 -1.0E20 -1.0E20 -Infinity original +103 NULL NULL NULL original +104 66475.56 66475.561431 -100.35978 original +105 9250341.0 9250340.75 NULL original +111 1234.5677 9876.543 1234.5678 new +PREHOOK: query: drop table table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: drop table table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float diff --git 
ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_table_update_llap_io.q.out ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_table_update_llap_io.q.out new file mode 100644 index 0000000..2292dcb --- /dev/null +++ ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_table_update_llap_io.q.out @@ -0,0 +1,449 @@ +PREHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@schema_evolution_data +POSTHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@schema_evolution_data +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@schema_evolution_data +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@schema_evolution_data +PREHOOK: query: CREATE TABLE schema_evolution_data_2(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@schema_evolution_data_2 +POSTHOOK: query: CREATE TABLE schema_evolution_data_2(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@schema_evolution_data_2 +PREHOOK: query: load data local inpath 
'../../data/files/schema_evolution/schema_evolution_data_2.txt' overwrite into table schema_evolution_data_2 +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@schema_evolution_data_2 +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data_2.txt' overwrite into table schema_evolution_data_2 +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@schema_evolution_data_2 +PREHOOK: query: CREATE TABLE table5(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table5 +POSTHOOK: query: CREATE TABLE table5(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table5 +PREHOOK: query: insert into table table5 SELECT insert_num, int1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table5 +POSTHOOK: query: insert into table table5 SELECT insert_num, int1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table5 +POSTHOOK: Lineage: table5.a SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table5.b SIMPLE [] +POSTHOOK: Lineage: table5.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 +PREHOOK: query: alter table table5 add columns(c int, d string) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@table5 +PREHOOK: Output: default@table5 +POSTHOOK: query: alter table table5 add columns(c int, d string) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@table5 +POSTHOOK: Output: default@table5 +PREHOOK: query: insert into table table5 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@table5 +POSTHOOK: query: insert into table table5 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@table5 +POSTHOOK: Lineage: table5.a SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table5.b SIMPLE [] +POSTHOOK: Lineage: table5.c SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table5.d SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:string1, type:string, comment:null), ] +POSTHOOK: Lineage: table5.insert_num SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 int1 string1 +PREHOOK: query: select a,b,c,d from table5 +PREHOOK: type: QUERY +PREHOOK: Input: default@table5 +#### A masked pattern was here #### +POSTHOOK: query: select a,b,c,d from table5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table5 +#### A masked pattern was here #### +a b c d +-128 new -128 smaller than Scotland +-2147483648 original NULL NULL +-57923222 new -57923222 astonishing +-999992 
original NULL NULL +0 new 0 hangar paralysed companion +127 new 127 bottom +17 new 17 feather weight +203332 original NULL NULL +2147483647 original NULL NULL +32768 new 32768 major glaciation +400 new 400 window +40000 new 40000 Delaware +82153733 new 82153733 Antarctic peninsula +NULL new NULL baffling +NULL original NULL NULL +PREHOOK: query: update table5 set c=99 +PREHOOK: type: QUERY +PREHOOK: Input: default@table5 +PREHOOK: Output: default@table5 +POSTHOOK: query: update table5 set c=99 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table5 +POSTHOOK: Output: default@table5 +row__id insert_num a b _c4 d +PREHOOK: query: select a,b,c,d from table5 +PREHOOK: type: QUERY +PREHOOK: Input: default@table5 +#### A masked pattern was here #### +POSTHOOK: query: select a,b,c,d from table5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table5 +#### A masked pattern was here #### +a b c d +-128 new 99 smaller than Scotland +-2147483648 original 99 NULL +-57923222 new 99 astonishing +-999992 original 99 NULL +0 new 99 hangar paralysed companion +127 new 99 bottom +17 new 99 feather weight +203332 original 99 NULL +2147483647 original 99 NULL +32768 new 99 major glaciation +400 new 99 window +40000 new 99 Delaware +82153733 new 99 Antarctic peninsula +NULL new 99 baffling +NULL original 99 NULL +PREHOOK: query: alter table table5 compact 'major' +PREHOOK: type: ALTERTABLE_COMPACT +POSTHOOK: query: alter table table5 compact 'major' +POSTHOOK: type: ALTERTABLE_COMPACT +PREHOOK: query: select a,b,c,d from table5 +PREHOOK: type: QUERY +PREHOOK: Input: default@table5 +#### A masked pattern was here #### +POSTHOOK: query: select a,b,c,d from table5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table5 +#### A masked pattern was here #### +a b c d +-128 new 99 smaller than Scotland +-2147483648 original 99 NULL +-57923222 new 99 astonishing +-999992 original 99 NULL +0 new 99 hangar paralysed companion +127 new 99 bottom +17 new 99 feather weight +203332 original 99 NULL +2147483647 original 99 NULL +32768 new 99 major glaciation +400 new 99 window +40000 new 99 Delaware +82153733 new 99 Antarctic peninsula +NULL new 99 baffling +NULL original 99 NULL +PREHOOK: query: DROP TABLE table5 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table5 +PREHOOK: Output: default@table5 +POSTHOOK: query: DROP TABLE table5 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table5 +POSTHOOK: Output: default@table5 +PREHOOK: query: CREATE TABLE table6(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table6 +POSTHOOK: query: CREATE TABLE table6(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table6 +PREHOOK: query: insert into table table6 SELECT insert_num, int1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table6 +POSTHOOK: query: insert into table table6 SELECT insert_num, int1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table6 +POSTHOOK: Lineage: table6.a SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table6.b SIMPLE [] +POSTHOOK: Lineage: 
table6.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 +PREHOOK: query: alter table table6 add columns(c int, d string) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@table6 +PREHOOK: Output: default@table6 +POSTHOOK: query: alter table table6 add columns(c int, d string) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@table6 +POSTHOOK: Output: default@table6 +PREHOOK: query: insert into table table6 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num <= 110 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@table6 +POSTHOOK: query: insert into table table6 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num <= 110 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@table6 +POSTHOOK: Lineage: table6.a SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table6.b SIMPLE [] +POSTHOOK: Lineage: table6.c SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table6.d SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:string1, type:string, comment:null), ] +POSTHOOK: Lineage: table6.insert_num SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 int1 string1 +PREHOOK: query: insert into table table6 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@table6 +POSTHOOK: query: insert into table table6 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@table6 +POSTHOOK: Lineage: table6.a SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table6.b SIMPLE [] +POSTHOOK: Lineage: table6.c SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table6.d SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:string1, type:string, comment:null), ] +POSTHOOK: Lineage: table6.insert_num SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 int1 string1 +PREHOOK: query: select a,b,c,d from table6 +PREHOOK: type: QUERY +PREHOOK: Input: default@table6 +#### A masked pattern was here #### +POSTHOOK: query: select a,b,c,d from table6 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table6 +#### A masked pattern was here #### +a b c d +-128 new -128 smaller than Scotland +-2147483648 original NULL NULL +-57923222 new -57923222 astonishing +-999992 original NULL NULL +0 new 0 hangar paralysed companion +127 new 127 bottom +17 new 17 feather weight +203332 original NULL NULL +2147483647 original NULL NULL +32768 new 32768 major glaciation +400 new 400 window +40000 new 40000 Delaware +82153733 new 82153733 Antarctic peninsula +NULL new NULL baffling +NULL original NULL NULL +PREHOOK: query: delete from table6 where insert_num = 102 or insert_num = 104 or 
insert_num = 106 +PREHOOK: type: QUERY +PREHOOK: Input: default@table6 +PREHOOK: Output: default@table6 +POSTHOOK: query: delete from table6 where insert_num = 102 or insert_num = 104 or insert_num = 106 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table6 +POSTHOOK: Output: default@table6 +row__id +PREHOOK: query: select a,b,c,d from table6 +PREHOOK: type: QUERY +PREHOOK: Input: default@table6 +#### A masked pattern was here #### +POSTHOOK: query: select a,b,c,d from table6 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table6 +#### A masked pattern was here #### +a b c d +-128 new -128 smaller than Scotland +-2147483648 original NULL NULL +-57923222 new -57923222 astonishing +-999992 original NULL NULL +127 new 127 bottom +17 new 17 feather weight +32768 new 32768 major glaciation +400 new 400 window +40000 new 40000 Delaware +82153733 new 82153733 Antarctic peninsula +NULL new NULL baffling +NULL original NULL NULL +PREHOOK: query: alter table table6 compact 'major' +PREHOOK: type: ALTERTABLE_COMPACT +POSTHOOK: query: alter table table6 compact 'major' +POSTHOOK: type: ALTERTABLE_COMPACT +PREHOOK: query: select a,b,c,d from table6 +PREHOOK: type: QUERY +PREHOOK: Input: default@table6 +#### A masked pattern was here #### +POSTHOOK: query: select a,b,c,d from table6 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table6 +#### A masked pattern was here #### +a b c d +-128 new -128 smaller than Scotland +-2147483648 original NULL NULL +-57923222 new -57923222 astonishing +-999992 original NULL NULL +127 new 127 bottom +17 new 17 feather weight +32768 new 32768 major glaciation +400 new 400 window +40000 new 40000 Delaware +82153733 new 82153733 Antarctic peninsula +NULL new NULL baffling +NULL original NULL NULL +PREHOOK: query: DROP TABLE table6 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table6 +PREHOOK: Output: default@table6 +POSTHOOK: query: DROP TABLE table6 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table6 +POSTHOOK: Output: default@table6 +PREHOOK: query: CREATE TABLE table7(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table7 +POSTHOOK: query: CREATE TABLE table7(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table7 +PREHOOK: query: insert into table table7 SELECT insert_num, int1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table7 +POSTHOOK: query: insert into table table7 SELECT insert_num, int1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table7 +POSTHOOK: Lineage: table7.a SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table7.b SIMPLE [] +POSTHOOK: Lineage: table7.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 +PREHOOK: query: alter table table7 add columns(c int, d string) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@table7 +PREHOOK: Output: default@table7 +POSTHOOK: query: alter table table7 add columns(c int, d string) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: 
default@table7 +POSTHOOK: Output: default@table7 +PREHOOK: query: insert into table table7 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num <= 110 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@table7 +POSTHOOK: query: insert into table table7 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num <= 110 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@table7 +POSTHOOK: Lineage: table7.a SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table7.b SIMPLE [] +POSTHOOK: Lineage: table7.c SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table7.d SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:string1, type:string, comment:null), ] +POSTHOOK: Lineage: table7.insert_num SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 int1 string1 +PREHOOK: query: insert into table table7 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@table7 +POSTHOOK: query: insert into table table7 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@table7 +POSTHOOK: Lineage: table7.a SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table7.b SIMPLE [] +POSTHOOK: Lineage: table7.c SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table7.d SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:string1, type:string, comment:null), ] +POSTHOOK: Lineage: table7.insert_num SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 int1 string1 +PREHOOK: query: select a,b,c,d from table7 +PREHOOK: type: QUERY +PREHOOK: Input: default@table7 +#### A masked pattern was here #### +POSTHOOK: query: select a,b,c,d from table7 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table7 +#### A masked pattern was here #### +a b c d +-128 new -128 smaller than Scotland +-2147483648 original NULL NULL +-57923222 new -57923222 astonishing +-999992 original NULL NULL +0 new 0 hangar paralysed companion +127 new 127 bottom +17 new 17 feather weight +203332 original NULL NULL +2147483647 original NULL NULL +32768 new 32768 major glaciation +400 new 400 window +40000 new 40000 Delaware +82153733 new 82153733 Antarctic peninsula +NULL new NULL baffling +NULL original NULL NULL +PREHOOK: query: delete from table7 where insert_num = 107 or insert_num >= 110 +PREHOOK: type: QUERY +PREHOOK: Input: default@table7 +PREHOOK: Output: default@table7 +POSTHOOK: query: delete from table7 where insert_num = 107 or insert_num >= 110 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table7 +POSTHOOK: Output: default@table7 +row__id +PREHOOK: query: select a,b,c,d from table7 +PREHOOK: type: QUERY +PREHOOK: Input: default@table7 +#### A masked pattern was here #### +POSTHOOK: query: select a,b,c,d 
from table7
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table7
+#### A masked pattern was here ####
+a	b	c	d
+-2147483648	original	NULL	NULL
+-999992	original	NULL	NULL
+0	new	0	hangar paralysed companion
+17	new	17	feather weight
+203332	original	NULL	NULL
+2147483647	original	NULL	NULL
+NULL	new	NULL	baffling
+NULL	original	NULL	NULL
+PREHOOK: query: alter table table7 compact 'major'
+PREHOOK: type: ALTERTABLE_COMPACT
+POSTHOOK: query: alter table table7 compact 'major'
+POSTHOOK: type: ALTERTABLE_COMPACT
+PREHOOK: query: select a,b,c,d from table7
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table7
+#### A masked pattern was here ####
+POSTHOOK: query: select a,b,c,d from table7
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table7
+#### A masked pattern was here ####
+a	b	c	d
+-2147483648	original	NULL	NULL
+-999992	original	NULL	NULL
+0	new	0	hangar paralysed companion
+17	new	17	feather weight
+203332	original	NULL	NULL
+2147483647	original	NULL	NULL
+NULL	new	NULL	baffling
+NULL	original	NULL	NULL
+PREHOOK: query: DROP TABLE table7
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@table7
+PREHOOK: Output: default@table7
+POSTHOOK: query: DROP TABLE table7
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@table7
+POSTHOOK: Output: default@table7
diff --git ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_part.q.out ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_part.q.out
index 23158c3..ff6d9ee 100644
--- ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_part.q.out
+++ ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_part.q.out
@@ -94,9 +94,10 @@ STAGE PLANS:
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
           inputFormatFeatureSupport: []
           featureSupportInUse: []
-          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
           allNative: false
           usesVectorUDFAdaptor: false
+          vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
           vectorized: true
           rowBatchContext:
             dataColumnCount: 4
@@ -235,9 +236,10 @@ STAGE PLANS:
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
           inputFormatFeatureSupport: []
           featureSupportInUse: []
-          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
           allNative: false
           usesVectorUDFAdaptor: false
+          vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
           vectorized: true
           rowBatchContext:
             dataColumnCount: 5
@@ -430,9 +432,10 @@ STAGE PLANS:
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
           inputFormatFeatureSupport: []
           featureSupportInUse: []
-          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
           allNative: false
           usesVectorUDFAdaptor: false
+          vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
           vectorized: true
           rowBatchContext:
             dataColumnCount: 5
@@ -564,9 +567,10 @@ STAGE PLANS:
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
           inputFormatFeatureSupport: []
           featureSupportInUse: []
-          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
           allNative: false
           usesVectorUDFAdaptor: false
+          vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
           vectorized: true
           rowBatchContext:
             dataColumnCount: 12
@@ -696,9 +700,10 @@ STAGE PLANS:
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
           inputFormatFeatureSupport: []
           featureSupportInUse: []
-          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
           allNative: false
           usesVectorUDFAdaptor: false
+          vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
           vectorized: true
           rowBatchContext:
             dataColumnCount: 22
@@ -819,9 +824,10 @@ STAGE PLANS:
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
           inputFormatFeatureSupport: []
           featureSupportInUse: []
-          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
           allNative: false
           usesVectorUDFAdaptor: false
+          vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
           vectorized: true
           rowBatchContext:
             dataColumnCount: 22
@@ -946,9 +952,10 @@ STAGE PLANS:
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
           inputFormatFeatureSupport: []
           featureSupportInUse: []
-          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
           allNative: false
           usesVectorUDFAdaptor: false
+          vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
           vectorized: true
           rowBatchContext:
             dataColumnCount: 17
@@ -1064,9 +1071,10 @@ STAGE PLANS:
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
           inputFormatFeatureSupport: []
           featureSupportInUse: []
-          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
           allNative: false
           usesVectorUDFAdaptor: false
+          vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
           vectorized: true
           rowBatchContext:
             dataColumnCount: 17
@@ -1184,9 +1192,10 @@ STAGE PLANS:
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
           inputFormatFeatureSupport: []
           featureSupportInUse: []
-          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
           allNative: false
           usesVectorUDFAdaptor: false
+          vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
           vectorized: true
           rowBatchContext:
             dataColumnCount: 12
@@ -1295,9 +1304,10 @@ STAGE PLANS:
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
           inputFormatFeatureSupport: []
           featureSupportInUse: []
-          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
           allNative: false
           usesVectorUDFAdaptor: false
+          vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
           vectorized: true
           rowBatchContext:
             dataColumnCount: 12
@@ -1429,9 +1439,10 @@ STAGE PLANS:
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
           inputFormatFeatureSupport: []
           featureSupportInUse: []
-          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
           allNative: false
           usesVectorUDFAdaptor: false
+          vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
           vectorized: true
           rowBatchContext:
             dataColumnCount: 20
@@ -1554,9 +1565,10 @@ STAGE PLANS:
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
           inputFormatFeatureSupport: []
           featureSupportInUse: []
-          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
           allNative: false
           usesVectorUDFAdaptor: false
+          vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
           vectorized: true
           rowBatchContext:
             dataColumnCount: 20
@@ -1665,9 +1677,10 @@ STAGE PLANS:
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
           inputFormatFeatureSupport: []
           featureSupportInUse: []
-          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
           allNative: false
           usesVectorUDFAdaptor: false
+          vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
           vectorized: true
           rowBatchContext:
             dataColumnCount: 5
@@ -1755,9 +1768,10 @@ STAGE PLANS:
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
           inputFormatFeatureSupport: []
           featureSupportInUse: []
-          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
           allNative: false
           usesVectorUDFAdaptor: false
+          vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
           vectorized: true
           rowBatchContext:
             dataColumnCount: 5
diff --git ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_part_llap_io.q.out ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_part_llap_io.q.out
new file mode 100644
index 0000000..2272db1
--- /dev/null
+++ ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_part_llap_io.q.out
@@ -0,0 +1,1825 @@
+PREHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string)
+row format delimited fields terminated by '|' stored as textfile
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@schema_evolution_data
+POSTHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string)
+row format delimited fields terminated by '|' stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@schema_evolution_data
+PREHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@schema_evolution_data
+POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@schema_evolution_data
+PREHOOK: query: CREATE TABLE part_add_int_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@part_add_int_permute_select
+POSTHOOK: query: CREATE TABLE part_add_int_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@part_add_int_permute_select
+PREHOOK: query: insert into table part_add_int_permute_select partition(part=1) VALUES (1, 1111, 'new')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@part_add_int_permute_select@part=1
+POSTHOOK: query: insert into table part_add_int_permute_select partition(part=1) VALUES (1, 1111, 'new')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@part_add_int_permute_select@part=1
+POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).a SCRIPT []
+POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).b SCRIPT []
+POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).insert_num SCRIPT []
+col1	col2	col3
+PREHOOK: query: alter table part_add_int_permute_select add columns(c int)
+PREHOOK: type: ALTERTABLE_ADDCOLS
+PREHOOK: Input: default@part_add_int_permute_select
+PREHOOK: Output: default@part_add_int_permute_select
+POSTHOOK: query: alter table part_add_int_permute_select add columns(c int)
+POSTHOOK: type: ALTERTABLE_ADDCOLS
+POSTHOOK: Input: default@part_add_int_permute_select
+POSTHOOK: Output: default@part_add_int_permute_select
+PREHOOK: query: insert into table part_add_int_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@part_add_int_permute_select@part=1
+POSTHOOK: query: insert into table part_add_int_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@part_add_int_permute_select@part=1
+POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).a SCRIPT []
+POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).b SCRIPT []
+POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).c SCRIPT []
+POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).insert_num SCRIPT []
+col1	col2	col3	col4
+PREHOOK: query: explain vectorization only detail
+select insert_num,part,a,b,c from part_add_int_permute_select
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization only detail
+select insert_num,part,a,b,c from part_add_int_permute_select
+POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan Vectorization:
+                    native: true
+                    vectorizationSchemaColumns: [0:insert_num:int, 1:a:int, 2:b:string, 3:c:int, 4:part:int, 5:ROW__ID:struct]
+                Select Vectorization:
+                    className: VectorSelectOperator
+                    native: true
+                    projectedOutputColumnNums: [0, 4, 1, 2, 3]
+                File Sink Vectorization:
+                    className: VectorFileSinkOperator
+                    native: false
+            Execution mode: vectorized, llap
+            LLAP IO: may be used (ACID table)
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+                inputFormatFeatureSupport: []
+                featureSupportInUse: []
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 4
+                    includeColumns: [0, 1, 2, 3]
+                    dataColumns: insert_num:int, a:int, b:string, c:int
+                    partitionColumnCount: 1
+                    partitionColumns: part:int
+                    scratchColumnTypeNames: []
+
+  Stage: Stage-0
+    Fetch Operator
+
+PREHOOK: query: select insert_num,part,a,b from part_add_int_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@part_add_int_permute_select
+PREHOOK: Input: default@part_add_int_permute_select@part=1
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,part,a,b from part_add_int_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@part_add_int_permute_select
+POSTHOOK: Input: default@part_add_int_permute_select@part=1
+#### A masked pattern was here ####
+insert_num	part	a	b
+1	1	1111	new
+2	1	2222	new
+PREHOOK: query: select insert_num,part,a,b,c from part_add_int_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@part_add_int_permute_select
+PREHOOK: Input: default@part_add_int_permute_select@part=1
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,part,a,b,c from part_add_int_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@part_add_int_permute_select
+POSTHOOK: Input: default@part_add_int_permute_select@part=1
+#### A masked pattern was here ####
+insert_num	part	a	b	c
+1	1	1111	new	NULL
+2	1	2222	new	3333
+PREHOOK: query: select insert_num,part,c from part_add_int_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@part_add_int_permute_select
+PREHOOK: Input: default@part_add_int_permute_select@part=1
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,part,c from part_add_int_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@part_add_int_permute_select
+POSTHOOK: Input: default@part_add_int_permute_select@part=1
+#### A masked pattern was here ####
+insert_num	part	c
+1	1	NULL
+2	1	3333
+PREHOOK: query: drop table part_add_int_permute_select
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@part_add_int_permute_select
+PREHOOK: Output: default@part_add_int_permute_select
+POSTHOOK: query: drop table part_add_int_permute_select
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@part_add_int_permute_select
+POSTHOOK: Output: default@part_add_int_permute_select
+PREHOOK: query: CREATE TABLE part_add_int_string_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@part_add_int_string_permute_select
+POSTHOOK: query: CREATE TABLE part_add_int_string_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@part_add_int_string_permute_select
+PREHOOK: query: insert into table part_add_int_string_permute_select partition(part=1) VALUES (1, 1111, 'new')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@part_add_int_string_permute_select@part=1
+POSTHOOK: query: insert into table part_add_int_string_permute_select partition(part=1) VALUES (1, 1111, 'new')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@part_add_int_string_permute_select@part=1
+POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).a SCRIPT []
+POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).b SCRIPT []
+POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).insert_num SCRIPT []
+col1	col2	col3
+PREHOOK: query: alter table part_add_int_string_permute_select add columns(c int, d string)
+PREHOOK: type: ALTERTABLE_ADDCOLS
+PREHOOK: Input: default@part_add_int_string_permute_select
+PREHOOK: Output: default@part_add_int_string_permute_select
+POSTHOOK: query: alter table part_add_int_string_permute_select add columns(c int, d string)
+POSTHOOK: type: ALTERTABLE_ADDCOLS
+POSTHOOK: Input: default@part_add_int_string_permute_select
+POSTHOOK: Output: default@part_add_int_string_permute_select
+PREHOOK: query: insert into table part_add_int_string_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333, '4444')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@part_add_int_string_permute_select@part=1
+POSTHOOK: query: insert into table part_add_int_string_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333, '4444')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@part_add_int_string_permute_select@part=1
+POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).a SCRIPT []
+POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).b SCRIPT []
+POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).c SCRIPT []
+POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).d SCRIPT []
+POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).insert_num SCRIPT []
+col1	col2	col3	col4	col5
+PREHOOK: query: explain vectorization only detail
+select insert_num,part,a,b,c,d from part_add_int_string_permute_select
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization only detail
+select insert_num,part,a,b,c,d from part_add_int_string_permute_select
+POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan Vectorization:
+                    native: true
+                    vectorizationSchemaColumns: [0:insert_num:int, 1:a:int, 2:b:string, 3:c:int, 4:d:string, 5:part:int, 6:ROW__ID:struct]
+                Select Vectorization:
+                    className: VectorSelectOperator
+                    native: true
+                    projectedOutputColumnNums: [0, 5, 1, 2, 3, 4]
+                File Sink Vectorization:
+                    className: VectorFileSinkOperator
+                    native: false
+            Execution mode: vectorized, llap
+            LLAP IO: may be used (ACID table)
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+                inputFormatFeatureSupport: []
+                featureSupportInUse: []
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 5
+                    includeColumns: [0, 1, 2, 3, 4]
+                    dataColumns: insert_num:int, a:int, b:string, c:int, d:string
+                    partitionColumnCount: 1
+                    partitionColumns: part:int
+                    scratchColumnTypeNames: []
+
+  Stage: Stage-0
+    Fetch Operator
+
+PREHOOK: query: select insert_num,part,a,b from part_add_int_string_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@part_add_int_string_permute_select
+PREHOOK: Input: default@part_add_int_string_permute_select@part=1
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,part,a,b from part_add_int_string_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@part_add_int_string_permute_select
+POSTHOOK: Input: default@part_add_int_string_permute_select@part=1
+#### A masked pattern was here ####
+insert_num	part	a	b
+1	1	1111	new
+2	1	2222	new
+PREHOOK: query: select insert_num,part,a,b,c from part_add_int_string_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@part_add_int_string_permute_select
+PREHOOK: Input: default@part_add_int_string_permute_select@part=1
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,part,a,b,c from part_add_int_string_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@part_add_int_string_permute_select
+POSTHOOK: Input: default@part_add_int_string_permute_select@part=1
+#### A masked pattern was here ####
+insert_num	part	a	b	c
+1	1	1111	new	NULL
+2	1	2222	new	3333
+PREHOOK: query: select insert_num,part,a,b,c,d from part_add_int_string_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@part_add_int_string_permute_select
+PREHOOK: Input: default@part_add_int_string_permute_select@part=1
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,part,a,b,c,d from part_add_int_string_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@part_add_int_string_permute_select
+POSTHOOK: Input: default@part_add_int_string_permute_select@part=1
+#### A masked pattern was here ####
+insert_num	part	a	b	c	d
+1	1	1111	new	NULL	NULL
+2	1	2222	new	3333	4444
+PREHOOK: query: select insert_num,part,a,c,d from part_add_int_string_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@part_add_int_string_permute_select
+PREHOOK: Input: default@part_add_int_string_permute_select@part=1
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,part,a,c,d from part_add_int_string_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@part_add_int_string_permute_select
+POSTHOOK: Input: default@part_add_int_string_permute_select@part=1
+#### A masked pattern was here ####
+insert_num	part	a	c	d
+1	1	1111	NULL	NULL
+2	1	2222	3333	4444
+PREHOOK: query: select insert_num,part,a,d from part_add_int_string_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@part_add_int_string_permute_select
+PREHOOK: Input: default@part_add_int_string_permute_select@part=1
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,part,a,d from part_add_int_string_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@part_add_int_string_permute_select
+POSTHOOK: Input: default@part_add_int_string_permute_select@part=1
+#### A masked pattern was here ####
+insert_num	part	a	d
+1	1	1111	NULL
+2	1	2222	4444
+PREHOOK: query: select insert_num,part,c from part_add_int_string_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@part_add_int_string_permute_select
+PREHOOK: Input: default@part_add_int_string_permute_select@part=1
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,part,c from part_add_int_string_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@part_add_int_string_permute_select
+POSTHOOK: Input: default@part_add_int_string_permute_select@part=1
+#### A masked pattern was here ####
+insert_num	part	c
+1	1	NULL
+2	1	3333
+PREHOOK: query: select insert_num,part,d from part_add_int_string_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@part_add_int_string_permute_select
+PREHOOK: Input: default@part_add_int_string_permute_select@part=1
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,part,d from part_add_int_string_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@part_add_int_string_permute_select
+POSTHOOK: Input: default@part_add_int_string_permute_select@part=1
+#### A masked pattern was here ####
+insert_num	part	d
+1	1	NULL
+2	1	4444
+PREHOOK: query: drop table part_add_int_string_permute_select
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@part_add_int_string_permute_select
+PREHOOK: Output: default@part_add_int_string_permute_select
+POSTHOOK: query: drop table part_add_int_string_permute_select
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@part_add_int_string_permute_select
+POSTHOOK: Output: default@part_add_int_string_permute_select
+PREHOOK: query: CREATE TABLE part_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING) PARTITIONED BY(part INT) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@part_change_string_group_double
+POSTHOOK: query: CREATE TABLE part_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING) PARTITIONED BY(part INT) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@part_change_string_group_double
+PREHOOK: query: insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data
+PREHOOK: type: QUERY
+PREHOOK: Input: default@schema_evolution_data
+PREHOOK: Output: default@part_change_string_group_double@part=1
+POSTHOOK: query: insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@schema_evolution_data
+POSTHOOK: Output: default@part_change_string_group_double@part=1
+POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).b SIMPLE []
+POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ]
+POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).c2 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ]
+POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).c3 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ]
+POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ]
+_col0	_col1	_col2	_col3	_col4
+PREHOOK: query: alter table part_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@part_change_string_group_double
+PREHOOK: Output: default@part_change_string_group_double
+POSTHOOK: query: alter table part_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@part_change_string_group_double
+POSTHOOK: Output: default@part_change_string_group_double
+PREHOOK: query: insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double1, double1, double1, 'new' FROM schema_evolution_data WHERE insert_num = 111
+PREHOOK: type: QUERY
+PREHOOK: Input: default@schema_evolution_data
+PREHOOK: Output: default@part_change_string_group_double@part=1
+POSTHOOK: query: insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double1, double1, double1, 'new' FROM schema_evolution_data WHERE insert_num = 111
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@schema_evolution_data
+POSTHOOK: Output: default@part_change_string_group_double@part=1
+POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).b SIMPLE []
+POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ]
+POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ]
+POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ]
+POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ]
+insert_num	double1	double1	double1	_c4
+PREHOOK: query: explain vectorization only detail
+select insert_num,part,c1,c2,c3,b from part_change_string_group_double
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization only detail
+select insert_num,part,c1,c2,c3,b from part_change_string_group_double
+POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan Vectorization:
+                    native: true
+                    vectorizationSchemaColumns: [0:insert_num:int, 1:c1:double, 2:c2:double, 3:c3:double, 4:b:string, 5:part:int, 6:ROW__ID:struct]
+                Select Vectorization:
+                    className: VectorSelectOperator
+                    native: true
+                    projectedOutputColumnNums: [0, 5, 1, 2, 3, 4]
+                File Sink Vectorization:
+                    className: VectorFileSinkOperator
+                    native: false
+            Execution mode: vectorized, llap
+            LLAP IO: may be used (ACID table)
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+                inputFormatFeatureSupport: []
+                featureSupportInUse: []
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 5
+                    includeColumns: [0, 1, 2, 3, 4]
+                    dataColumns: insert_num:int, c1:double, c2:double, c3:double, b:string
+                    partitionColumnCount: 1
+                    partitionColumns: part:int
+                    scratchColumnTypeNames: []
+
+  Stage: Stage-0
+    Fetch Operator
+
+PREHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_string_group_double
+PREHOOK: type: QUERY
+PREHOOK: Input: default@part_change_string_group_double
+PREHOOK: Input: default@part_change_string_group_double@part=1
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_string_group_double
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@part_change_string_group_double
+POSTHOOK: Input: default@part_change_string_group_double@part=1
+#### A masked pattern was here ####
+insert_num	part	c1	c2	c3	b
+101	1	1.7976931348623157E308	1.7976931348623157E308	1.7976931348623157E308	original
+102	1	-1.7976931348623157E308	-1.7976931348623157E308	-1.7976931348623157E308	original
+103	1	NULL	NULL	NULL	original
+104	1	30.774	30.774	30.774	original
+105	1	46114.28	46114.28	46114.28	original
+PREHOOK: query: drop table part_change_string_group_double
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@part_change_string_group_double
+PREHOOK: Output: default@part_change_string_group_double
+POSTHOOK: query: drop table part_change_string_group_double
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@part_change_string_group_double
+POSTHOOK: Output: default@part_change_string_group_double
+PREHOOK: query: CREATE TABLE part_change_date_group_string_group_date_timestamp(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING) PARTITIONED BY(part INT) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@part_change_date_group_string_group_date_timestamp
+POSTHOOK: query: CREATE TABLE part_change_date_group_string_group_date_timestamp(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING) PARTITIONED BY(part INT) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@part_change_date_group_string_group_date_timestamp
+PREHOOK: query: insert into table part_change_date_group_string_group_date_timestamp partition(part=1) SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data
+PREHOOK: type: QUERY
+PREHOOK: Input: default@schema_evolution_data
+PREHOOK: Output: default@part_change_date_group_string_group_date_timestamp@part=1
+POSTHOOK: query: insert into table part_change_date_group_string_group_date_timestamp partition(part=1) SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@schema_evolution_data
+POSTHOOK: Output: default@part_change_date_group_string_group_date_timestamp@part=1
+POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).b SIMPLE []
+POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ]
+POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ]
+POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ]
+POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ]
+POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ]
+POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ]
+insert_num	date1	date1	date1	date1	date1	timestamp1	timestamp1	timestamp1	timestamp1	timestamp1	_c11
+PREHOOK: query: alter table part_change_date_group_string_group_date_timestamp replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@part_change_date_group_string_group_date_timestamp
+PREHOOK: Output: default@part_change_date_group_string_group_date_timestamp
+POSTHOOK: query: alter table part_change_date_group_string_group_date_timestamp replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp
+POSTHOOK: Output: default@part_change_date_group_string_group_date_timestamp
+PREHOOK: query: insert into table part_change_date_group_string_group_date_timestamp partition(part=1) VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@part_change_date_group_string_group_date_timestamp@part=1
+POSTHOOK: query: insert into table part_change_date_group_string_group_date_timestamp partition(part=1) VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@part_change_date_group_string_group_date_timestamp@part=1
+POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).b SCRIPT []
+POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c1 SCRIPT []
+POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c10 SCRIPT []
+POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c2 SCRIPT []
+POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c3 SCRIPT []
+POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c4 SCRIPT []
+POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c5 SCRIPT []
+POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c6 SCRIPT []
+POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c7 SCRIPT []
+POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c8 SCRIPT []
+POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c9 SCRIPT []
+POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).insert_num SCRIPT []
+_col0	_col1	_col2	_col3	_col4	_col5	_col6	_col7	_col8	_col9	_col10	_col11
+PREHOOK: query: explain vectorization only detail
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization only detail
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp
+POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan Vectorization:
+                    native: true
+                    vectorizationSchemaColumns: [0:insert_num:int, 1:c1:string, 2:c2:char(50), 3:c3:char(15), 4:c4:varchar(50), 5:c5:varchar(15), 6:c6:string, 7:c7:char(50), 8:c8:char(15), 9:c9:varchar(50), 10:c10:varchar(15), 11:b:string, 12:part:int, 13:ROW__ID:struct]
+                Select Vectorization:
+                    className: VectorSelectOperator
+                    native: true
+                    projectedOutputColumnNums: [0, 12, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
+                File Sink Vectorization:
+                    className: VectorFileSinkOperator
+                    native: false
+            Execution mode: vectorized, llap
+            LLAP IO: may be used (ACID table)
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+                inputFormatFeatureSupport: []
+                featureSupportInUse: []
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 12
+                    includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
+                    dataColumns: insert_num:int, c1:string, c2:char(50), c3:char(15), c4:varchar(50), c5:varchar(15), c6:string, c7:char(50), c8:char(15), c9:varchar(50), c10:varchar(15), b:string
+                    partitionColumnCount: 1
+                    partitionColumns: part:int
+                    scratchColumnTypeNames: []
+
+  Stage: Stage-0
+    Fetch Operator
+
+PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp
+PREHOOK: type: QUERY
+PREHOOK: Input: default@part_change_date_group_string_group_date_timestamp
+PREHOOK: Input: default@part_change_date_group_string_group_date_timestamp@part=1
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp
+POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp@part=1
+#### A masked pattern was here ####
+insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	b
+101	1	1950-12-18	1950-12-18	1950-12-18	1950-12-18	1950-12-18	6229-06-28 02:54:28.970117179	6229-06-28 02:54:28.970117179	6229-06-28 02:5	6229-06-28 02:54:28.970117179	6229-06-28 02:5	original
+102	1	2049-12-18	2049-12-18	2049-12-18	2049-12-18	2049-12-18	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597	5966-07-09 03:3	5966-07-09 03:30:50.597	5966-07-09 03:3	original
+103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
+104	1	2021-09-24	2021-09-24	2021-09-24	2021-09-24	2021-09-24	1978-08-02 06:34:14.0	1978-08-02 06:34:14.0	1978-08-02 06:3	1978-08-02 06:34:14.0	1978-08-02 06:3	original
+105	1	2024-11-11	2024-11-11	2024-11-11	2024-11-11	2024-11-11	1991-01-06 16:20:39.72036854	1991-01-06 16:20:39.72036854	1991-01-06 16:2	1991-01-06 16:20:39.72036854	1991-01-06 16:2	original
+111	1	filler	filler	filler	filler	filler	filler	filler	filler	filler	filler	new
+PREHOOK: query: drop table part_change_date_group_string_group_date_timestamp
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@part_change_date_group_string_group_date_timestamp
+PREHOOK: Output: default@part_change_date_group_string_group_date_timestamp
+POSTHOOK: query: drop table part_change_date_group_string_group_date_timestamp
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp
+POSTHOOK: Output: default@part_change_date_group_string_group_date_timestamp
+PREHOOK: query: CREATE TABLE part_change_numeric_group_string_group_multi_ints_string_group(insert_num int,
+             c1 tinyint, c2 smallint, c3 int, c4 bigint,
+             c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint,
+             c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint,
+             b STRING) PARTITIONED BY(part INT)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: query: CREATE TABLE part_change_numeric_group_string_group_multi_ints_string_group(insert_num int,
+             c1 tinyint, c2 smallint, c3 int, c4 bigint,
+             c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint,
+             c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint,
+             b STRING) PARTITIONED BY(part INT)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: query: insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) SELECT insert_num,
+             tinyint1, smallint1, int1, bigint1,
+             tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1,
+             tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1,
+             'original' FROM schema_evolution_data
+PREHOOK: type: QUERY
+PREHOOK: Input: default@schema_evolution_data
+PREHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1
+POSTHOOK: query: insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) SELECT insert_num,
+             tinyint1, smallint1, int1, bigint1,
+             tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1,
+             tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1,
+             'original' FROM schema_evolution_data
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@schema_evolution_data
+POSTHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).b SIMPLE []
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c16 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c17 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c18 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c19 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c20 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ]
+insert_num	tinyint1	smallint1	int1	bigint1	tinyint1	smallint1	int1	bigint1	tinyint1	smallint1	int1	bigint1	tinyint1	smallint1	int1	bigint1	tinyint1	smallint1	int1	bigint1	_c21
+PREHOOK: query: explain vectorization only detail
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization only detail
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan Vectorization:
+                    native: true
+                    vectorizationSchemaColumns: [0:insert_num:int, 1:c1:tinyint, 2:c2:smallint, 3:c3:int, 4:c4:bigint, 5:c5:tinyint, 6:c6:smallint, 7:c7:int, 8:c8:bigint, 9:c9:tinyint, 10:c10:smallint, 11:c11:int, 12:c12:bigint, 13:c13:tinyint, 14:c14:smallint, 15:c15:int, 16:c16:bigint, 17:c17:tinyint, 18:c18:smallint, 19:c19:int, 20:c20:bigint, 21:b:string, 22:part:int, 23:ROW__ID:struct]
+                Select Vectorization:
+                    className: VectorSelectOperator
+                    native: true
+                    projectedOutputColumnNums: [0, 22, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21]
+                File Sink Vectorization:
+                    className: VectorFileSinkOperator
+                    native: false
+            Execution mode: vectorized, llap
+            LLAP IO: all inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+                inputFormatFeatureSupport: []
+                featureSupportInUse: []
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 22
+                    includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21]
+                    dataColumns: insert_num:int, c1:tinyint, c2:smallint, c3:int, c4:bigint, c5:tinyint, c6:smallint, c7:int, c8:bigint, c9:tinyint, c10:smallint, c11:int, c12:bigint, c13:tinyint, c14:smallint, c15:int, c16:bigint, c17:tinyint, c18:smallint, c19:int, c20:bigint, b:string
+                    partitionColumnCount: 1
+                    partitionColumns: part:int
+                    scratchColumnTypeNames: []
+
+  Stage: Stage-0
+    Fetch Operator
+
+PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: type: QUERY
+PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1
+#### A masked pattern was here ####
+insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	c13	c14	c15	c16	c17	c18	c19	c20	b
+101	1	-128	NULL	-2147483648	NULL	-128	NULL	-2147483648	NULL	-128	NULL	-2147483648	NULL	-128	NULL	-2147483648	NULL	-128	NULL	-2147483648	NULL	original
+102	1	127	32767	2147483647	9223372036854775807	127	32767	2147483647	9223372036854775807	127	32767	2147483647	9223372036854775807	127	32767	2147483647	9223372036854775807	127	32767	2147483647	9223372036854775807	original
+103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
+104	1	23	834	203332	888888857923222	23	834	203332	888888857923222	23	834	203332	888888857923222	23	834	203332	888888857923222	23	834	203332	888888857923222	original
+105	1	-99	-28300	-999992	-222282153733	-99	-28300	-999992	-222282153733	-99	-28300	-999992	-222282153733	-99	-28300	-999992	-222282153733	-99	-28300	-999992	-222282153733	original
+PREHOOK: query: alter table part_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int,
+             c1 STRING, c2 STRING, c3 STRING, c4 STRING,
+             c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5),
+             c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5),
+             b STRING)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: query: alter table part_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int,
+             c1 STRING, c2 STRING, c3 STRING, c4 STRING,
+             c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5),
+             c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5),
+             b STRING)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: query: insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) VALUES (111,
+             'filler', 'filler', 'filler', 'filler',
+             'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler',
+             'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler',
+             'new')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1
+POSTHOOK: query: insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) VALUES (111,
+             'filler', 'filler', 'filler', 'filler',
+             'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler',
+             'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler',
+             'new')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).b SCRIPT []
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c1 SCRIPT []
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c10 SCRIPT []
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c11 SCRIPT []
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c12 SCRIPT []
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c13 SCRIPT []
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c14 SCRIPT []
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c15 SCRIPT []
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c16 SCRIPT []
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c17 SCRIPT []
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c18 SCRIPT []
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c19 SCRIPT []
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c2 SCRIPT []
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c20 SCRIPT []
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c3 SCRIPT []
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c4 SCRIPT []
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c5 SCRIPT []
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c6 SCRIPT []
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c7 SCRIPT []
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c8 SCRIPT []
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c9 SCRIPT []
+POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).insert_num SCRIPT []
+_col0	_col1	_col2	_col3	_col4	_col5	_col6	_col7	_col8	_col9	_col10	_col11	_col12	_col13	_col14	_col15	_col16	_col17	_col18	_col19	_col20	_col21
+PREHOOK: query: explain vectorization only detail
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization only detail
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan Vectorization:
+                    native: true
+                    vectorizationSchemaColumns: [0:insert_num:int, 1:c1:string, 2:c2:string, 3:c3:string, 4:c4:string, 5:c5:char(50), 6:c6:char(50), 7:c7:char(50), 8:c8:char(50), 9:c9:char(5), 10:c10:char(5), 11:c11:char(5), 12:c12:char(5), 13:c13:varchar(50), 14:c14:varchar(50), 15:c15:varchar(50), 16:c16:varchar(50), 17:c17:varchar(5), 18:c18:varchar(5), 19:c19:varchar(5), 20:c20:varchar(5), 21:b:string, 22:part:int, 23:ROW__ID:struct]
+                Select Vectorization:
+                    className: VectorSelectOperator
+                    native: true
+                    projectedOutputColumnNums: [0, 22, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21]
+                File Sink Vectorization:
+                    className: VectorFileSinkOperator
+                    native: false
+            Execution mode: vectorized, llap
+            LLAP IO: all inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+                inputFormatFeatureSupport: []
+                featureSupportInUse: []
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 22
+                    includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21]
+                    dataColumns: insert_num:int, c1:string, c2:string, c3:string, c4:string, c5:char(50), c6:char(50), c7:char(50), c8:char(50), c9:char(5), c10:char(5), c11:char(5), c12:char(5), c13:varchar(50), c14:varchar(50), c15:varchar(50), c16:varchar(50), c17:varchar(5), c18:varchar(5), c19:varchar(5), c20:varchar(5), b:string
+                    partitionColumnCount: 1
+                    partitionColumns: part:int
+                    scratchColumnTypeNames: []
+
+  Stage: Stage-0
+    Fetch Operator
+
+PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: type: QUERY
+PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1
+#### A masked pattern was here ####
+insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	c13	c14	c15	c16	c17	c18	c19	c20	b
+101	1	-128	NULL	-2147483648	NULL	-128	NULL	-2147483648	NULL	-128	NULL	-2147	NULL	-128	NULL	-2147483648	NULL	-128	NULL	-2147	NULL	original
+102	1	127	32767	2147483647	9223372036854775807	127	32767	2147483647	9223372036854775807	127	32767	21474	92233	127	32767	2147483647	9223372036854775807	127	32767	21474	92233	original
+103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
+104	1	23	834	203332	888888857923222	23	834	203332	888888857923222	23	834	20333	88888	23	834	203332	888888857923222	23	834	20333	88888	original
+105	1	-99	-28300	-999992	-222282153733	-99	-28300	-999992	-222282153733	-99	-2830	-9999	-2222	-99	-28300	-999992	-222282153733	-99	-2830	-9999	-2222	original
+111	1	filler	filler	filler	filler	filler	filler	filler	filler	fille	fille	fille	fille	filler	filler	filler	filler	fille	fille	fille	fille	new
+PREHOOK: query: drop table part_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: query: drop table part_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: query: CREATE TABLE part_change_numeric_group_string_group_floating_string_group(insert_num int,
+             c1 decimal(38,18), c2 float, c3 double,
+             c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double,
+             c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double,
+             b STRING) PARTITIONED BY(part INT)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group
+POSTHOOK: query: CREATE TABLE part_change_numeric_group_string_group_floating_string_group(insert_num int,
+             c1 decimal(38,18), c2 float, c3 double,
+             c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double,
+             c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double,
+             b STRING) PARTITIONED BY(part INT)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group
+PREHOOK: query: insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) SELECT insert_num,
+             decimal1, float1, double1,
+             decimal1, float1, double1, decimal1, float1, double1,
+             decimal1, float1, double1, decimal1, float1, double1,
+             'original' FROM schema_evolution_data
+PREHOOK: type: QUERY
+PREHOOK: Input: default@schema_evolution_data
+PREHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group@part=1
+POSTHOOK: query: insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) SELECT insert_num,
+             decimal1, float1, double1,
+             decimal1, float1, double1, decimal1, float1, double1,
+             decimal1, float1, double1, decimal1, float1, double1,
+             'original' FROM schema_evolution_data
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@schema_evolution_data
+POSTHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group@part=1
+POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).b SIMPLE []
+POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ]
+POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ]
+insert_num	decimal1	float1	double1	decimal1	float1	double1	decimal1	float1	double1	decimal1	float1	double1	decimal1	float1	double1	_c16
+PREHOOK: query: explain vectorization only detail
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization only detail
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group
+POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan Vectorization:
+                    native: true
+                    vectorizationSchemaColumns: [0:insert_num:int, 1:c1:decimal(38,18), 2:c2:float, 3:c3:double, 4:c4:decimal(38,18), 5:c5:float, 6:c6:double, 7:c7:decimal(38,18), 8:c8:float, 9:c9:double, 10:c10:decimal(38,18), 11:c11:float, 12:c12:double, 13:c13:decimal(38,18), 14:c14:float, 15:c15:double, 16:b:string, 17:part:int, 18:ROW__ID:struct]
+                Select Vectorization:
+                    className: VectorSelectOperator
+                    native: true
+                    projectedOutputColumnNums: [0, 17, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]
+                File Sink Vectorization:
+                    className: VectorFileSinkOperator
+                    native: false
+            Execution mode: vectorized, llap
+            LLAP IO: all inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+                inputFormatFeatureSupport: []
+                featureSupportInUse: []
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 17
+                    includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]
+                    dataColumns: insert_num:int, c1:decimal(38,18), c2:float, c3:double, c4:decimal(38,18), c5:float, c6:double, c7:decimal(38,18), c8:float, c9:double, c10:decimal(38,18), c11:float, c12:double, c13:decimal(38,18), c14:float, c15:double, b:string
+                    partitionColumnCount: 1
+                    partitionColumns: part:int
+                    scratchColumnTypeNames: []
+
+  Stage: Stage-0
+    Fetch Operator
+
+PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group
+PREHOOK: type: QUERY
+PREHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group
+PREHOOK: Input:
default@part_change_numeric_group_string_group_floating_string_group@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b +101 1 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 original +102 1 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 original +105 1 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 original +PREHOOK: query: alter table part_change_numeric_group_string_group_floating_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, + c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7), + c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7), + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: query: alter table part_change_numeric_group_string_group_floating_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, + c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7), + c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7), + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: query: insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group@part=1 +POSTHOOK: query: insert into table 
part_change_numeric_group_string_group_floating_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group@part=1 +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c1 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c10 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c11 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c12 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c13 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c14 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c15 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c2 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c3 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c4 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c5 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c6 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c7 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c8 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c9 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 +PREHOOK: query: explain vectorization only detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization only detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Vertices: + Map 1 + Map Operator Tree: + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:c1:string, 2:c2:string, 3:c3:string, 4:c4:char(50), 5:c5:char(50), 6:c6:char(50), 7:c7:char(7), 8:c8:char(7), 9:c9:char(7), 10:c10:varchar(50), 11:c11:varchar(50), 12:c12:varchar(50), 13:c13:varchar(7), 14:c14:varchar(7), 15:c15:varchar(7), 16:b:string, 17:part:int, 18:ROW__ID:struct] + Select 
Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 17, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 17 + includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] + dataColumns: insert_num:int, c1:string, c2:string, c3:string, c4:char(50), c5:char(50), c6:char(50), c7:char(7), c8:char(7), c9:char(7), c10:varchar(50), c11:varchar(50), c12:varchar(50), c13:varchar(7), c14:varchar(7), c15:varchar(7), b:string + partitionColumnCount: 1 + partitionColumns: part:int + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b +101 1 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 original +102 1 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 66475.561431 -100.35978 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 original +105 1 9250340.75 NULL 46114.28 9250340.75 NULL 46114.28 9250340 NULL 46114.2 9250340.75 NULL 46114.28 9250340 NULL 46114.2 original +111 1 filler filler filler filler filler filler filler filler filler filler filler filler filler filler filler new +PREHOOK: query: drop table part_change_numeric_group_string_group_floating_string_group +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: query: drop table part_change_numeric_group_string_group_floating_string_group +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: 
default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: query: CREATE TABLE part_change_string_group_string_group_string(insert_num int, + c1 string, c2 string, c3 string, c4 string, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), + c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING) PARTITIONED BY(part INT) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_string_group_string_group_string +POSTHOOK: query: CREATE TABLE part_change_string_group_string_group_string(insert_num int, + c1 string, c2 string, c3 string, c4 string, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), + c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING) PARTITIONED BY(part INT) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_string_group_string_group_string +PREHOOK: query: insert into table part_change_string_group_string_group_string partition(part=1) SELECT insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_string_group_string_group_string@part=1 +POSTHOOK: query: insert into table part_change_string_group_string_group_string partition(part=1) SELECT insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_string_group_string_group_string@part=1 +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c10 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c5 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c6 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c7 EXPRESSION 
[(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c8 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c9 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 +PREHOOK: query: explain vectorization only detail +select insert_num,part,c1,c2,c3,c4,b from part_change_string_group_string_group_string +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization only detail +select insert_num,part,c1,c2,c3,c4,b from part_change_string_group_string_group_string +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Vertices: + Map 1 + Map Operator Tree: + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:c1:string, 2:c2:string, 3:c3:string, 4:c4:string, 5:c5:char(50), 6:c6:char(50), 7:c7:char(50), 8:c8:varchar(50), 9:c9:varchar(50), 10:c10:varchar(50), 11:b:string, 12:part:int, 13:ROW__ID:struct] + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 12, 1, 2, 3, 4, 11] + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Execution mode: vectorized, llap + LLAP IO: may be used (ACID table) + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 12 + includeColumns: [0, 1, 2, 3, 4, 11] + dataColumns: insert_num:int, c1:string, c2:string, c3:string, c4:string, c5:char(50), c6:char(50), c7:char(50), c8:varchar(50), c9:varchar(50), c10:varchar(50), b:string + partitionColumnCount: 1 + partitionColumns: part:int + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,b from part_change_string_group_string_group_string +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_string_group_string_group_string +PREHOOK: Input: default@part_change_string_group_string_group_string@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,b from part_change_string_group_string_group_string +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_string_group_string_group_string +POSTHOOK: Input: default@part_change_string_group_string_group_string@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 b +101 1 freckled freckled freckled freckled original +102 1 ox ox ox ox original +103 1 original +104 1 I cooked I cooked I cooked I cooked original +105 1 200 200 
200 200 original +PREHOOK: query: alter table part_change_string_group_string_group_string replace columns (insert_num int, + c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9), + c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING, + c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_string_group_string_group_string +PREHOOK: Output: default@part_change_string_group_string_group_string +POSTHOOK: query: alter table part_change_string_group_string_group_string replace columns (insert_num int, + c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9), + c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING, + c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_string_group_string_group_string +POSTHOOK: Output: default@part_change_string_group_string_group_string +PREHOOK: query: insert into table part_change_string_group_string_group_string partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_change_string_group_string_group_string@part=1 +POSTHOOK: query: insert into table part_change_string_group_string_group_string partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_change_string_group_string_group_string@part=1 +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c1 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c10 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c2 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c3 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c4 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c5 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c6 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c7 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c8 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c9 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 +PREHOOK: query: explain vectorization only detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization only detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Vertices: + Map 1 + Map Operator Tree: + TableScan Vectorization: + native: 
true + vectorizationSchemaColumns: [0:insert_num:int, 1:c1:char(50), 2:c2:char(9), 3:c3:varchar(50), 4:c4:char(9), 5:c5:varchar(50), 6:c6:varchar(9), 7:c7:string, 8:c8:char(50), 9:c9:char(9), 10:c10:string, 11:b:string, 12:part:int, 13:ROW__ID:struct] + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 12, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Execution mode: vectorized, llap + LLAP IO: may be used (ACID table) + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 12 + includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] + dataColumns: insert_num:int, c1:char(50), c2:char(9), c3:varchar(50), c4:char(9), c5:varchar(50), c6:varchar(9), c7:string, c8:char(50), c9:char(9), c10:string, b:string + partitionColumnCount: 1 + partitionColumns: part:int + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_string_group_string_group_string +PREHOOK: Input: default@part_change_string_group_string_group_string@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_string_group_string_group_string +POSTHOOK: Input: default@part_change_string_group_string_group_string@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b +101 1 freckled freckled freckled freckled freckled freckled freckled freckled freckled freckled original +102 1 ox ox ox ox ox ox ox ox ox ox original +103 1 original +104 1 I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked original +105 1 200 200 200 200 200 200 200 200 200 200 original +111 1 filler filler filler filler filler filler filler filler filler filler new +PREHOOK: query: drop table part_change_string_group_string_group_string +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_string_group_string_group_string +PREHOOK: Output: default@part_change_string_group_string_group_string +POSTHOOK: query: drop table part_change_string_group_string_group_string +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_string_group_string_group_string +POSTHOOK: Output: default@part_change_string_group_string_group_string +PREHOOK: query: CREATE TABLE part_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int, + c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, + c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint, + c12 int, c13 int, c14 int, c15 int, + c16 bigint, c17 bigint, c18 bigint, + b STRING) PARTITIONED BY(part INT) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: 
default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: CREATE TABLE part_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int, + c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, + c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint, + c12 int, c13 int, c14 int, c15 int, + c16 bigint, c17 bigint, c18 bigint, + b STRING) PARTITIONED BY(part INT) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: query: insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +POSTHOOK: query: insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c16 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint 
PARTITION(part=1).c17 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c18 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 smallint1 smallint1 smallint1 smallint1 smallint1 int1 int1 int1 int1 bigint1 bigint1 bigint1 _c19 +PREHOOK: query: explain vectorization only detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization only detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Vertices: + Map 1 + Map Operator Tree: + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:c1:tinyint, 2:c2:tinyint, 3:c3:tinyint, 4:c4:tinyint, 5:c5:tinyint, 6:c6:tinyint, 7:c7:smallint, 8:c8:smallint, 9:c9:smallint, 10:c10:smallint, 11:c11:smallint, 12:c12:int, 13:c13:int, 14:c14:int, 15:c15:int, 16:c16:bigint, 17:c17:bigint, 18:c18:bigint, 19:b:string, 20:part:int, 21:ROW__ID:struct] 
+ Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 20, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19] + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Execution mode: vectorized, llap + LLAP IO: may be used (ACID table) + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 20 + includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19] + dataColumns: insert_num:int, c1:tinyint, c2:tinyint, c3:tinyint, c4:tinyint, c5:tinyint, c6:tinyint, c7:smallint, c8:smallint, c9:smallint, c10:smallint, c11:smallint, c12:int, c13:int, c14:int, c15:int, c16:bigint, c17:bigint, c18:bigint, b:string + partitionColumnCount: 1 + partitionColumns: part:int + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 b +101 1 -128 -128 -128 -128 -128 -128 NULL NULL NULL NULL NULL -2147483648 -2147483648 -2147483648 -2147483648 NULL NULL NULL original +102 1 127 127 127 127 127 127 32767 32767 32767 32767 32767 2147483647 2147483647 2147483647 2147483647 9223372036854775807 9223372036854775807 9223372036854775807 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 23 23 23 23 23 23 834 834 834 834 834 203332 203332 203332 203332 888888857923222 888888857923222 888888857923222 original +105 1 -99 -99 -99 -99 -99 -99 -28300 -28300 -28300 -28300 -28300 -999992 -999992 -999992 -999992 -222282153733 -222282153733 -222282153733 original +PREHOOK: query: alter table part_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: alter table part_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 
SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: query: insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +POSTHOOK: query: insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c1 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c10 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c11 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c12 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c13 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c14 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c15 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c16 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c17 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c18 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c2 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c3 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c4 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c5 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c6 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c7 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c8 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c9 SCRIPT [] +POSTHOOK: Lineage: 
part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 _col17 _col18 _col19 +PREHOOK: query: explain vectorization only detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization only detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Vertices: + Map 1 + Map Operator Tree: + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:c1:smallint, 2:c2:int, 3:c3:bigint, 4:c4:decimal(38,18), 5:c5:float, 6:c6:double, 7:c7:int, 8:c8:bigint, 9:c9:decimal(38,18), 10:c10:float, 11:c11:double, 12:c12:bigint, 13:c13:decimal(38,18), 14:c14:float, 15:c15:double, 16:c16:decimal(38,18), 17:c17:float, 18:c18:double, 19:b:string, 20:part:int, 21:ROW__ID:struct] + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 20, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19] + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Execution mode: vectorized, llap + LLAP IO: may be used (ACID table) + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 20 + includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19] + dataColumns: insert_num:int, c1:smallint, c2:int, c3:bigint, c4:decimal(38,18), c5:float, c6:double, c7:int, c8:bigint, c9:decimal(38,18), c10:float, c11:double, c12:bigint, c13:decimal(38,18), c14:float, c15:double, c16:decimal(38,18), c17:float, c18:double, b:string + partitionColumnCount: 1 + partitionColumns: part:int + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 b +101 1 -128 
-128 -128 -128.000000000000000000 -128.0 -128.0 NULL NULL NULL NULL NULL -2147483648 -2147483648.000000000000000000 -2.14748365E9 -2.147483648E9 NULL NULL NULL original +102 1 127 127 127 127.000000000000000000 127.0 127.0 32767 32767 32767.000000000000000000 32767.0 32767.0 2147483647 2147483647.000000000000000000 2.14748365E9 2.147483647E9 9223372036854775807.000000000000000000 9.223372E18 9.223372036854776E18 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 23 23 23 23.000000000000000000 23.0 23.0 834 834 834.000000000000000000 834.0 834.0 203332 203332.000000000000000000 203332.0 203332.0 888888857923222.000000000000000000 8.8888885E14 8.88888857923222E14 original +105 1 -99 -99 -99 -99.000000000000000000 -99.0 -99.0 -28300 -28300 -28300.000000000000000000 -28300.0 -28300.0 -999992 -999992.000000000000000000 -999992.0 -999992.0 -222282153733.000000000000000000 -2.22282154E11 -2.22282153733E11 original +111 1 7000 80000 90000000 1234.567800000000000000 9876.543 789.321 80000 90000000 1234.567800000000000000 9876.543 789.321 90000000 1234.567800000000000000 9876.543 789.321 1234.567800000000000000 9876.543 789.321 new +PREHOOK: query: drop table part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: drop table part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: query: CREATE TABLE part_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING) PARTITIONED BY(part INT) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: CREATE TABLE part_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING) PARTITIONED BY(part INT) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: query: insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) SELECT insert_num, + decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +POSTHOOK: query: insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) SELECT insert_num, + decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: 
part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num decimal1 decimal1 float1 _c4 +PREHOOK: query: explain vectorization only detail +select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization only detail +select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Vertices: + Map 1 + Map Operator Tree: + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:c1:decimal(38,18), 2:c2:decimal(38,18), 3:c3:float, 4:b:string, 5:part:int, 6:ROW__ID:struct] + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 5, 1, 2, 3, 4] + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Execution mode: vectorized, llap + LLAP IO: may be used (ACID table) + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 5 + includeColumns: [0, 1, 2, 3, 4] + dataColumns: insert_num:int, c1:decimal(38,18), c2:decimal(38,18), c3:float, b:string + partitionColumnCount: 1 + partitionColumns: part:int + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + +PREHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 b +101 1 99999999999999999999.999999999999999999 99999999999999999999.999999999999999999 Infinity original +102 1 -99999999999999999999.999999999999999999 
-99999999999999999999.999999999999999999 -Infinity original +103 1 NULL NULL NULL original +104 1 66475.561431000000000000 66475.561431000000000000 -100.35978 original +105 1 9250340.750000000000000000 9250340.750000000000000000 NULL original +PREHOOK: query: alter table part_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: alter table part_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: query: insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +POSTHOOK: query: insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c1 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c2 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c3 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 +PREHOOK: query: explain vectorization only detail +select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization only detail +select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Vertices: + Map 1 + Map Operator Tree: + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:c1:float, 2:c2:double, 3:c3:double, 4:b:string, 5:part:int, 6:ROW__ID:struct] + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 5, 1, 2, 3, 4] + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Execution mode: vectorized, llap + LLAP IO: may be used (ACID table) + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: 
false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 5 + includeColumns: [0, 1, 2, 3, 4] + dataColumns: insert_num:int, c1:float, c2:double, c3:double, b:string + partitionColumnCount: 1 + partitionColumns: part:int + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + +PREHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 b +101 1 1.0E20 1.0E20 Infinity original +102 1 -1.0E20 -1.0E20 -Infinity original +103 1 NULL NULL NULL original +104 1 66475.56 66475.561431 -100.35978 original +105 1 9250341.0 9250340.75 NULL original +111 1 1234.5677 9876.543 1234.5678 new +PREHOOK: query: drop table part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: drop table part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float diff --git ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_part_update_llap_io.q.out ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_part_update_llap_io.q.out new file mode 100644 index 0000000..f56ec4c --- /dev/null +++ ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_part_update_llap_io.q.out @@ -0,0 +1,537 @@ +PREHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@schema_evolution_data +POSTHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +POSTHOOK: type: 
CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@schema_evolution_data +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@schema_evolution_data +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@schema_evolution_data +PREHOOK: query: CREATE TABLE schema_evolution_data_2(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@schema_evolution_data_2 +POSTHOOK: query: CREATE TABLE schema_evolution_data_2(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@schema_evolution_data_2 +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data_2.txt' overwrite into table schema_evolution_data_2 +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@schema_evolution_data_2 +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data_2.txt' overwrite into table schema_evolution_data_2 +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@schema_evolution_data_2 +PREHOOK: query: CREATE TABLE partitioned_update_1(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@partitioned_update_1 +POSTHOOK: query: CREATE TABLE partitioned_update_1(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@partitioned_update_1 +PREHOOK: query: insert into table partitioned_update_1 partition(part=1) SELECT insert_num, int1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@partitioned_update_1@part=1 +POSTHOOK: query: insert into table partitioned_update_1 partition(part=1) SELECT insert_num, int1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: 
default@partitioned_update_1@part=1 +POSTHOOK: Lineage: partitioned_update_1 PARTITION(part=1).a SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_update_1 PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: partitioned_update_1 PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 +PREHOOK: query: alter table partitioned_update_1 add columns(c int, d string) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@partitioned_update_1 +PREHOOK: Output: default@partitioned_update_1 +POSTHOOK: query: alter table partitioned_update_1 add columns(c int, d string) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@partitioned_update_1 +POSTHOOK: Output: default@partitioned_update_1 +PREHOOK: query: insert into table partitioned_update_1 partition(part=2) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num <=110 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@partitioned_update_1@part=2 +POSTHOOK: query: insert into table partitioned_update_1 partition(part=2) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num <=110 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@partitioned_update_1@part=2 +POSTHOOK: Lineage: partitioned_update_1 PARTITION(part=2).a SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_update_1 PARTITION(part=2).b SIMPLE [] +POSTHOOK: Lineage: partitioned_update_1 PARTITION(part=2).c SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_update_1 PARTITION(part=2).d SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:string1, type:string, comment:null), ] +POSTHOOK: Lineage: partitioned_update_1 PARTITION(part=2).insert_num SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 int1 string1 +PREHOOK: query: insert into table partitioned_update_1 partition(part=1) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@partitioned_update_1@part=1 +POSTHOOK: query: insert into table partitioned_update_1 partition(part=1) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@partitioned_update_1@part=1 +POSTHOOK: Lineage: partitioned_update_1 PARTITION(part=1).a SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_update_1 PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: partitioned_update_1 PARTITION(part=1).c SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_update_1 PARTITION(part=1).d SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:string1, type:string, comment:null), ] +POSTHOOK: Lineage: partitioned_update_1 PARTITION(part=1).insert_num SIMPLE 
[(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 int1 string1 +PREHOOK: query: select insert_num,part,a,b,c,d from partitioned_update_1 +PREHOOK: type: QUERY +PREHOOK: Input: default@partitioned_update_1 +PREHOOK: Input: default@partitioned_update_1@part=1 +PREHOOK: Input: default@partitioned_update_1@part=2 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b,c,d from partitioned_update_1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partitioned_update_1 +POSTHOOK: Input: default@partitioned_update_1@part=1 +POSTHOOK: Input: default@partitioned_update_1@part=2 +#### A masked pattern was here #### +insert_num part a b c d +101 1 -2147483648 original NULL NULL +102 1 2147483647 original NULL NULL +103 1 NULL original NULL NULL +104 1 203332 original NULL NULL +105 1 -999992 original NULL NULL +106 2 0 new 0 hangar paralysed companion +107 2 127 new 127 bottom +108 2 NULL new NULL baffling +109 2 17 new 17 feather weight +110 2 400 new 400 window +111 1 -57923222 new -57923222 astonishing +112 1 82153733 new 82153733 Antarctic peninsula +113 1 40000 new 40000 Delaware +114 1 -128 new -128 smaller than Scotland +115 1 32768 new 32768 major glaciation +PREHOOK: query: update partitioned_update_1 set c=99 +PREHOOK: type: QUERY +PREHOOK: Input: default@partitioned_update_1 +PREHOOK: Input: default@partitioned_update_1@part=1 +PREHOOK: Input: default@partitioned_update_1@part=2 +PREHOOK: Output: default@partitioned_update_1@part=1 +PREHOOK: Output: default@partitioned_update_1@part=2 +POSTHOOK: query: update partitioned_update_1 set c=99 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partitioned_update_1 +POSTHOOK: Input: default@partitioned_update_1@part=1 +POSTHOOK: Input: default@partitioned_update_1@part=2 +POSTHOOK: Output: default@partitioned_update_1@part=1 +POSTHOOK: Output: default@partitioned_update_1@part=2 +row__id insert_num a b _c4 d part +PREHOOK: query: select insert_num,part,a,b,c,d from partitioned_update_1 +PREHOOK: type: QUERY +PREHOOK: Input: default@partitioned_update_1 +PREHOOK: Input: default@partitioned_update_1@part=1 +PREHOOK: Input: default@partitioned_update_1@part=2 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b,c,d from partitioned_update_1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partitioned_update_1 +POSTHOOK: Input: default@partitioned_update_1@part=1 +POSTHOOK: Input: default@partitioned_update_1@part=2 +#### A masked pattern was here #### +insert_num part a b c d +101 1 -2147483648 original 99 NULL +102 1 2147483647 original 99 NULL +103 1 NULL original 99 NULL +104 1 203332 original 99 NULL +105 1 -999992 original 99 NULL +106 2 0 new 99 hangar paralysed companion +107 2 127 new 99 bottom +108 2 NULL new 99 baffling +109 2 17 new 99 feather weight +110 2 400 new 99 window +111 1 -57923222 new 99 astonishing +112 1 82153733 new 99 Antarctic peninsula +113 1 40000 new 99 Delaware +114 1 -128 new 99 smaller than Scotland +115 1 32768 new 99 major glaciation +PREHOOK: query: alter table partitioned_update_1 partition(part=1) compact 'major' +PREHOOK: type: ALTERTABLE_COMPACT +POSTHOOK: query: alter table partitioned_update_1 partition(part=1) compact 'major' +POSTHOOK: type: ALTERTABLE_COMPACT +PREHOOK: query: alter table partitioned_update_1 partition(part=2) compact 'major' +PREHOOK: type: ALTERTABLE_COMPACT +POSTHOOK: query: alter table partitioned_update_1 partition(part=2) compact 'major' +POSTHOOK: type: 
ALTERTABLE_COMPACT +PREHOOK: query: select insert_num,part,a,b,c,d from partitioned_update_1 +PREHOOK: type: QUERY +PREHOOK: Input: default@partitioned_update_1 +PREHOOK: Input: default@partitioned_update_1@part=1 +PREHOOK: Input: default@partitioned_update_1@part=2 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b,c,d from partitioned_update_1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partitioned_update_1 +POSTHOOK: Input: default@partitioned_update_1@part=1 +POSTHOOK: Input: default@partitioned_update_1@part=2 +#### A masked pattern was here #### +insert_num part a b c d +101 1 -2147483648 original 99 NULL +102 1 2147483647 original 99 NULL +103 1 NULL original 99 NULL +104 1 203332 original 99 NULL +105 1 -999992 original 99 NULL +106 2 0 new 99 hangar paralysed companion +107 2 127 new 99 bottom +108 2 NULL new 99 baffling +109 2 17 new 99 feather weight +110 2 400 new 99 window +111 1 -57923222 new 99 astonishing +112 1 82153733 new 99 Antarctic peninsula +113 1 40000 new 99 Delaware +114 1 -128 new 99 smaller than Scotland +115 1 32768 new 99 major glaciation +PREHOOK: query: DROP TABLE partitioned_update_1 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@partitioned_update_1 +PREHOOK: Output: default@partitioned_update_1 +POSTHOOK: query: DROP TABLE partitioned_update_1 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@partitioned_update_1 +POSTHOOK: Output: default@partitioned_update_1 +PREHOOK: query: CREATE TABLE partitioned_delete_1(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@partitioned_delete_1 +POSTHOOK: query: CREATE TABLE partitioned_delete_1(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@partitioned_delete_1 +PREHOOK: query: insert into table partitioned_delete_1 partition(part=1) SELECT insert_num, int1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@partitioned_delete_1@part=1 +POSTHOOK: query: insert into table partitioned_delete_1 partition(part=1) SELECT insert_num, int1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@partitioned_delete_1@part=1 +POSTHOOK: Lineage: partitioned_delete_1 PARTITION(part=1).a SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_delete_1 PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: partitioned_delete_1 PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 +PREHOOK: query: alter table partitioned_delete_1 add columns(c int, d string) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@partitioned_delete_1 +PREHOOK: Output: default@partitioned_delete_1 +POSTHOOK: query: alter table partitioned_delete_1 add columns(c int, d string) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@partitioned_delete_1 +POSTHOOK: Output: default@partitioned_delete_1 +PREHOOK: query: insert into table partitioned_delete_1 partition(part=2) SELECT insert_num, int1, 
'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num <=110 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@partitioned_delete_1@part=2 +POSTHOOK: query: insert into table partitioned_delete_1 partition(part=2) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num <=110 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@partitioned_delete_1@part=2 +POSTHOOK: Lineage: partitioned_delete_1 PARTITION(part=2).a SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_delete_1 PARTITION(part=2).b SIMPLE [] +POSTHOOK: Lineage: partitioned_delete_1 PARTITION(part=2).c SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_delete_1 PARTITION(part=2).d SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:string1, type:string, comment:null), ] +POSTHOOK: Lineage: partitioned_delete_1 PARTITION(part=2).insert_num SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 int1 string1 +PREHOOK: query: insert into table partitioned_delete_1 partition(part=1) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@partitioned_delete_1@part=1 +POSTHOOK: query: insert into table partitioned_delete_1 partition(part=1) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@partitioned_delete_1@part=1 +POSTHOOK: Lineage: partitioned_delete_1 PARTITION(part=1).a SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_delete_1 PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: partitioned_delete_1 PARTITION(part=1).c SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_delete_1 PARTITION(part=1).d SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:string1, type:string, comment:null), ] +POSTHOOK: Lineage: partitioned_delete_1 PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 int1 string1 +PREHOOK: query: select part,a,b,c,d from partitioned_delete_1 +PREHOOK: type: QUERY +PREHOOK: Input: default@partitioned_delete_1 +PREHOOK: Input: default@partitioned_delete_1@part=1 +PREHOOK: Input: default@partitioned_delete_1@part=2 +#### A masked pattern was here #### +POSTHOOK: query: select part,a,b,c,d from partitioned_delete_1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partitioned_delete_1 +POSTHOOK: Input: default@partitioned_delete_1@part=1 +POSTHOOK: Input: default@partitioned_delete_1@part=2 +#### A masked pattern was here #### +part a b c d +1 -128 new -128 smaller than Scotland +1 -2147483648 original NULL NULL +1 -57923222 new -57923222 astonishing +1 -999992 original NULL NULL +1 203332 original NULL NULL +1 2147483647 original NULL NULL +1 32768 new 32768 major glaciation +1 40000 new 40000 Delaware +1 82153733 new 82153733 
Antarctic peninsula +1 NULL original NULL NULL +2 0 new 0 hangar paralysed companion +2 127 new 127 bottom +2 17 new 17 feather weight +2 400 new 400 window +2 NULL new NULL baffling +PREHOOK: query: delete from partitioned_delete_1 where insert_num = 102 or insert_num = 104 or insert_num = 106 +PREHOOK: type: QUERY +PREHOOK: Input: default@partitioned_delete_1 +PREHOOK: Input: default@partitioned_delete_1@part=1 +PREHOOK: Input: default@partitioned_delete_1@part=2 +PREHOOK: Output: default@partitioned_delete_1@part=1 +PREHOOK: Output: default@partitioned_delete_1@part=2 +POSTHOOK: query: delete from partitioned_delete_1 where insert_num = 102 or insert_num = 104 or insert_num = 106 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partitioned_delete_1 +POSTHOOK: Input: default@partitioned_delete_1@part=1 +POSTHOOK: Input: default@partitioned_delete_1@part=2 +POSTHOOK: Output: default@partitioned_delete_1@part=1 +POSTHOOK: Output: default@partitioned_delete_1@part=2 +row__id part +PREHOOK: query: select insert_num,part,a,b,c,d from partitioned_delete_1 +PREHOOK: type: QUERY +PREHOOK: Input: default@partitioned_delete_1 +PREHOOK: Input: default@partitioned_delete_1@part=1 +PREHOOK: Input: default@partitioned_delete_1@part=2 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b,c,d from partitioned_delete_1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partitioned_delete_1 +POSTHOOK: Input: default@partitioned_delete_1@part=1 +POSTHOOK: Input: default@partitioned_delete_1@part=2 +#### A masked pattern was here #### +insert_num part a b c d +101 1 -2147483648 original NULL NULL +103 1 NULL original NULL NULL +105 1 -999992 original NULL NULL +107 2 127 new 127 bottom +108 2 NULL new NULL baffling +109 2 17 new 17 feather weight +110 2 400 new 400 window +111 1 -57923222 new -57923222 astonishing +112 1 82153733 new 82153733 Antarctic peninsula +113 1 40000 new 40000 Delaware +114 1 -128 new -128 smaller than Scotland +115 1 32768 new 32768 major glaciation +PREHOOK: query: alter table partitioned_delete_1 partition(part=1) compact 'major' +PREHOOK: type: ALTERTABLE_COMPACT +POSTHOOK: query: alter table partitioned_delete_1 partition(part=1) compact 'major' +POSTHOOK: type: ALTERTABLE_COMPACT +PREHOOK: query: alter table partitioned_delete_1 partition(part=2) compact 'major' +PREHOOK: type: ALTERTABLE_COMPACT +POSTHOOK: query: alter table partitioned_delete_1 partition(part=2) compact 'major' +POSTHOOK: type: ALTERTABLE_COMPACT +PREHOOK: query: select insert_num,part,a,b,c,d from partitioned_delete_1 +PREHOOK: type: QUERY +PREHOOK: Input: default@partitioned_delete_1 +PREHOOK: Input: default@partitioned_delete_1@part=1 +PREHOOK: Input: default@partitioned_delete_1@part=2 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b,c,d from partitioned_delete_1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partitioned_delete_1 +POSTHOOK: Input: default@partitioned_delete_1@part=1 +POSTHOOK: Input: default@partitioned_delete_1@part=2 +#### A masked pattern was here #### +insert_num part a b c d +101 1 -2147483648 original NULL NULL +103 1 NULL original NULL NULL +105 1 -999992 original NULL NULL +107 2 127 new 127 bottom +108 2 NULL new NULL baffling +109 2 17 new 17 feather weight +110 2 400 new 400 window +111 1 -57923222 new -57923222 astonishing +112 1 82153733 new 82153733 Antarctic peninsula +113 1 40000 new 40000 Delaware +114 1 -128 new -128 smaller than Scotland +115 1 32768 new 32768 major glaciation +PREHOOK: query: DROP TABLE 
partitioned_delete_1 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@partitioned_delete_1 +PREHOOK: Output: default@partitioned_delete_1 +POSTHOOK: query: DROP TABLE partitioned_delete_1 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@partitioned_delete_1 +POSTHOOK: Output: default@partitioned_delete_1 +PREHOOK: query: CREATE TABLE partitioned_delete_2(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@partitioned_delete_2 +POSTHOOK: query: CREATE TABLE partitioned_delete_2(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@partitioned_delete_2 +PREHOOK: query: insert into table partitioned_delete_2 partition(part=1) SELECT insert_num, int1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@partitioned_delete_2@part=1 +POSTHOOK: query: insert into table partitioned_delete_2 partition(part=1) SELECT insert_num, int1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@partitioned_delete_2@part=1 +POSTHOOK: Lineage: partitioned_delete_2 PARTITION(part=1).a SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_delete_2 PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: partitioned_delete_2 PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 +PREHOOK: query: alter table partitioned_delete_2 add columns(c int, d string) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@partitioned_delete_2 +PREHOOK: Output: default@partitioned_delete_2 +POSTHOOK: query: alter table partitioned_delete_2 add columns(c int, d string) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@partitioned_delete_2 +POSTHOOK: Output: default@partitioned_delete_2 +PREHOOK: query: insert into table partitioned_delete_2 partition(part=2) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num <=110 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@partitioned_delete_2@part=2 +POSTHOOK: query: insert into table partitioned_delete_2 partition(part=2) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num <=110 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@partitioned_delete_2@part=2 +POSTHOOK: Lineage: partitioned_delete_2 PARTITION(part=2).a SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_delete_2 PARTITION(part=2).b SIMPLE [] +POSTHOOK: Lineage: partitioned_delete_2 PARTITION(part=2).c SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_delete_2 PARTITION(part=2).d SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:string1, type:string, comment:null), ] +POSTHOOK: Lineage: partitioned_delete_2 PARTITION(part=2).insert_num 
SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 int1 string1 +PREHOOK: query: insert into table partitioned_delete_2 partition(part=1) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@partitioned_delete_2@part=1 +POSTHOOK: query: insert into table partitioned_delete_2 partition(part=1) SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@partitioned_delete_2@part=1 +POSTHOOK: Lineage: partitioned_delete_2 PARTITION(part=1).a SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_delete_2 PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: partitioned_delete_2 PARTITION(part=1).c SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: partitioned_delete_2 PARTITION(part=1).d SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:string1, type:string, comment:null), ] +POSTHOOK: Lineage: partitioned_delete_2 PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 int1 string1 +PREHOOK: query: select insert_num,part,a,b,c,d from partitioned_delete_2 +PREHOOK: type: QUERY +PREHOOK: Input: default@partitioned_delete_2 +PREHOOK: Input: default@partitioned_delete_2@part=1 +PREHOOK: Input: default@partitioned_delete_2@part=2 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b,c,d from partitioned_delete_2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partitioned_delete_2 +POSTHOOK: Input: default@partitioned_delete_2@part=1 +POSTHOOK: Input: default@partitioned_delete_2@part=2 +#### A masked pattern was here #### +insert_num part a b c d +101 1 -2147483648 original NULL NULL +102 1 2147483647 original NULL NULL +103 1 NULL original NULL NULL +104 1 203332 original NULL NULL +105 1 -999992 original NULL NULL +106 2 0 new 0 hangar paralysed companion +107 2 127 new 127 bottom +108 2 NULL new NULL baffling +109 2 17 new 17 feather weight +110 2 400 new 400 window +111 1 -57923222 new -57923222 astonishing +112 1 82153733 new 82153733 Antarctic peninsula +113 1 40000 new 40000 Delaware +114 1 -128 new -128 smaller than Scotland +115 1 32768 new 32768 major glaciation +PREHOOK: query: delete from partitioned_delete_2 where insert_num = 108 or insert_num > 113 +PREHOOK: type: QUERY +PREHOOK: Input: default@partitioned_delete_2 +PREHOOK: Input: default@partitioned_delete_2@part=1 +PREHOOK: Input: default@partitioned_delete_2@part=2 +PREHOOK: Output: default@partitioned_delete_2@part=1 +PREHOOK: Output: default@partitioned_delete_2@part=2 +POSTHOOK: query: delete from partitioned_delete_2 where insert_num = 108 or insert_num > 113 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partitioned_delete_2 +POSTHOOK: Input: default@partitioned_delete_2@part=1 +POSTHOOK: Input: default@partitioned_delete_2@part=2 +POSTHOOK: Output: default@partitioned_delete_2@part=1 +POSTHOOK: Output: default@partitioned_delete_2@part=2 +row__id part +PREHOOK: query: select insert_num,part,a,b,c,d from partitioned_delete_2 +PREHOOK: type: QUERY 
+PREHOOK: Input: default@partitioned_delete_2 +PREHOOK: Input: default@partitioned_delete_2@part=1 +PREHOOK: Input: default@partitioned_delete_2@part=2 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b,c,d from partitioned_delete_2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partitioned_delete_2 +POSTHOOK: Input: default@partitioned_delete_2@part=1 +POSTHOOK: Input: default@partitioned_delete_2@part=2 +#### A masked pattern was here #### +insert_num part a b c d +101 1 -2147483648 original NULL NULL +102 1 2147483647 original NULL NULL +103 1 NULL original NULL NULL +104 1 203332 original NULL NULL +105 1 -999992 original NULL NULL +106 2 0 new 0 hangar paralysed companion +107 2 127 new 127 bottom +109 2 17 new 17 feather weight +110 2 400 new 400 window +111 1 -57923222 new -57923222 astonishing +112 1 82153733 new 82153733 Antarctic peninsula +113 1 40000 new 40000 Delaware +PREHOOK: query: alter table partitioned_delete_2 partition(part=1) compact 'major' +PREHOOK: type: ALTERTABLE_COMPACT +POSTHOOK: query: alter table partitioned_delete_2 partition(part=1) compact 'major' +POSTHOOK: type: ALTERTABLE_COMPACT +PREHOOK: query: alter table partitioned_delete_2 partition(part=2) compact 'major' +PREHOOK: type: ALTERTABLE_COMPACT +POSTHOOK: query: alter table partitioned_delete_2 partition(part=2) compact 'major' +POSTHOOK: type: ALTERTABLE_COMPACT +PREHOOK: query: select insert_num,part,a,b,c,d from partitioned_delete_2 +PREHOOK: type: QUERY +PREHOOK: Input: default@partitioned_delete_2 +PREHOOK: Input: default@partitioned_delete_2@part=1 +PREHOOK: Input: default@partitioned_delete_2@part=2 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b,c,d from partitioned_delete_2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partitioned_delete_2 +POSTHOOK: Input: default@partitioned_delete_2@part=1 +POSTHOOK: Input: default@partitioned_delete_2@part=2 +#### A masked pattern was here #### +insert_num part a b c d +101 1 -2147483648 original NULL NULL +102 1 2147483647 original NULL NULL +103 1 NULL original NULL NULL +104 1 203332 original NULL NULL +105 1 -999992 original NULL NULL +106 2 0 new 0 hangar paralysed companion +107 2 127 new 127 bottom +109 2 17 new 17 feather weight +110 2 400 new 400 window +111 1 -57923222 new -57923222 astonishing +112 1 82153733 new 82153733 Antarctic peninsula +113 1 40000 new 40000 Delaware +PREHOOK: query: DROP TABLE partitioned_delete_2 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@partitioned_delete_2 +PREHOOK: Output: default@partitioned_delete_2 +POSTHOOK: query: DROP TABLE partitioned_delete_2 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@partitioned_delete_2 +POSTHOOK: Output: default@partitioned_delete_2 diff --git ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table.q.out ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table.q.out index 45b323b..9c6ff27 100644 --- ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table.q.out +++ ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table.q.out @@ -94,9 +94,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 4 @@ -240,9 +241,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 5 @@ -448,9 +450,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 5 @@ -580,9 +583,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 @@ -709,9 +713,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 22 @@ -829,9 +834,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 22 @@ -953,9 +959,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 17 @@ -1068,9 +1075,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 17 @@ -1185,9 +1193,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 @@ -1293,9 +1302,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 @@ -1424,9 +1434,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 20 @@ -1546,9 +1557,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 20 @@ -1654,9 +1666,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 5 @@ -1741,9 +1754,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 5 diff --git ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table_llap_io.q.out ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table_llap_io.q.out new file mode 100644 index 0000000..d33876b --- /dev/null +++ ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table_llap_io.q.out @@ -0,0 +1,1808 @@ +PREHOOK: query: CREATE TABLE schema_evolution_data(insert_num 
int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@schema_evolution_data +POSTHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@schema_evolution_data +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@schema_evolution_data +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@schema_evolution_data +PREHOOK: query: CREATE TABLE table_add_int_permute_select(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table_add_int_permute_select +POSTHOOK: query: CREATE TABLE table_add_int_permute_select(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table_add_int_permute_select +PREHOOK: query: insert into table table_add_int_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table_add_int_permute_select +POSTHOOK: query: insert into table table_add_int_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_add_int_permute_select +POSTHOOK: Lineage: table_add_int_permute_select.a SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_add_int_permute_select.b SIMPLE [] +POSTHOOK: Lineage: table_add_int_permute_select.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 +PREHOOK: query: alter table table_add_int_permute_select add columns(c int) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@table_add_int_permute_select +PREHOOK: Output: default@table_add_int_permute_select +POSTHOOK: query: alter table table_add_int_permute_select add columns(c 
int) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@table_add_int_permute_select +POSTHOOK: Output: default@table_add_int_permute_select +PREHOOK: query: insert into table table_add_int_permute_select VALUES (111, 80000, 'new', 80000) +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@table_add_int_permute_select +POSTHOOK: query: insert into table table_add_int_permute_select VALUES (111, 80000, 'new', 80000) +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@table_add_int_permute_select +POSTHOOK: Lineage: table_add_int_permute_select.a SCRIPT [] +POSTHOOK: Lineage: table_add_int_permute_select.b SCRIPT [] +POSTHOOK: Lineage: table_add_int_permute_select.c SCRIPT [] +POSTHOOK: Lineage: table_add_int_permute_select.insert_num SCRIPT [] +col1 col2 col3 col4 +PREHOOK: query: explain vectorization only detail +select insert_num,a,b,c from table_add_int_permute_select +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization only detail +select insert_num,a,b,c from table_add_int_permute_select +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Vertices: + Map 1 + Map Operator Tree: + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:a:int, 2:b:string, 3:c:int, 4:ROW__ID:struct] + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 3] + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Execution mode: vectorized, llap + LLAP IO: may be used (ACID table) + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 4 + includeColumns: [0, 1, 2, 3] + dataColumns: insert_num:int, a:int, b:string, c:int + partitionColumnCount: 0 + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + +PREHOOK: query: select insert_num,a,b from table_add_int_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@table_add_int_permute_select +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,a,b from table_add_int_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_add_int_permute_select +#### A masked pattern was here #### +insert_num a b +101 -2147483648 original +102 2147483647 original +103 NULL original +104 203332 original +105 -999992 original +111 80000 new +PREHOOK: query: select insert_num,a,b,c from table_add_int_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@table_add_int_permute_select +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,a,b,c from table_add_int_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_add_int_permute_select +#### A masked pattern was here #### +insert_num a b c +101 -2147483648 original NULL +102 2147483647 original NULL +103 NULL original NULL +104 203332 original NULL +105 -999992 original NULL +111 80000 new 80000 +PREHOOK: query: select 
insert_num,c from table_add_int_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@table_add_int_permute_select +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c from table_add_int_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_add_int_permute_select +#### A masked pattern was here #### +insert_num c +101 NULL +102 NULL +103 NULL +104 NULL +105 NULL +111 80000 +PREHOOK: query: drop table table_add_int_permute_select +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table_add_int_permute_select +PREHOOK: Output: default@table_add_int_permute_select +POSTHOOK: query: drop table table_add_int_permute_select +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table_add_int_permute_select +POSTHOOK: Output: default@table_add_int_permute_select +PREHOOK: query: CREATE TABLE table_add_int_string_permute_select(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table_add_int_string_permute_select +POSTHOOK: query: CREATE TABLE table_add_int_string_permute_select(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table_add_int_string_permute_select +PREHOOK: query: insert into table table_add_int_string_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table_add_int_string_permute_select +POSTHOOK: query: insert into table table_add_int_string_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_add_int_string_permute_select +POSTHOOK: Lineage: table_add_int_string_permute_select.a SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_add_int_string_permute_select.b SIMPLE [] +POSTHOOK: Lineage: table_add_int_string_permute_select.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 +PREHOOK: query: alter table table_add_int_string_permute_select add columns(c int, d string) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@table_add_int_string_permute_select +PREHOOK: Output: default@table_add_int_string_permute_select +POSTHOOK: query: alter table table_add_int_string_permute_select add columns(c int, d string) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@table_add_int_string_permute_select +POSTHOOK: Output: default@table_add_int_string_permute_select +PREHOOK: query: insert into table table_add_int_string_permute_select VALUES (111, 80000, 'new', 80000, 'filler') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@table_add_int_string_permute_select +POSTHOOK: query: insert into table table_add_int_string_permute_select VALUES (111, 80000, 'new', 80000, 'filler') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@table_add_int_string_permute_select +POSTHOOK: Lineage: table_add_int_string_permute_select.a SCRIPT [] +POSTHOOK: Lineage: table_add_int_string_permute_select.b SCRIPT [] +POSTHOOK: Lineage: 
table_add_int_string_permute_select.c SCRIPT []
+POSTHOOK: Lineage: table_add_int_string_permute_select.d SCRIPT []
+POSTHOOK: Lineage: table_add_int_string_permute_select.insert_num SCRIPT []
+col1 col2 col3 col4 col5
+PREHOOK: query: explain vectorization only detail
+select insert_num,a,b,c,d from table_add_int_string_permute_select
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization only detail
+select insert_num,a,b,c,d from table_add_int_string_permute_select
+POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+ enabled: true
+ enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan Vectorization:
+ native: true
+ vectorizationSchemaColumns: [0:insert_num:int, 1:a:int, 2:b:string, 3:c:int, 4:d:string, 5:ROW__ID:struct]
+ Select Vectorization:
+ className: VectorSelectOperator
+ native: true
+ projectedOutputColumnNums: [0, 1, 2, 3, 4]
+ File Sink Vectorization:
+ className: VectorFileSinkOperator
+ native: false
+ Execution mode: vectorized, llap
+ LLAP IO: may be used (ACID table)
+ Map Vectorization:
+ enabled: true
+ enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+ inputFormatFeatureSupport: []
+ featureSupportInUse: []
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
+ allNative: false
+ usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
+ vectorized: true
+ rowBatchContext:
+ dataColumnCount: 5
+ includeColumns: [0, 1, 2, 3, 4]
+ dataColumns: insert_num:int, a:int, b:string, c:int, d:string
+ partitionColumnCount: 0
+ scratchColumnTypeNames: []
+
+ Stage: Stage-0
+ Fetch Operator
+
+PREHOOK: query: select insert_num,a,b from table_add_int_string_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,a,b from table_add_int_string_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+insert_num a b
+101 -2147483648 original
+102 2147483647 original
+103 NULL original
+104 203332 original
+105 -999992 original
+111 80000 new
+PREHOOK: query: select insert_num,a,b,c from table_add_int_string_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,a,b,c from table_add_int_string_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+insert_num a b c
+101 -2147483648 original NULL
+102 2147483647 original NULL
+103 NULL original NULL
+104 203332 original NULL
+105 -999992 original NULL
+111 80000 new 80000
+PREHOOK: query: select insert_num,a,b,c,d from table_add_int_string_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,a,b,c,d from table_add_int_string_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+insert_num a b c d
+101 -2147483648 original NULL NULL
+102 2147483647 original NULL NULL
+103 NULL original NULL NULL
+104 203332 original NULL NULL
+105 -999992 original NULL NULL
+111 80000 new 80000 filler
+PREHOOK: query: select insert_num,a,c,d from table_add_int_string_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,a,c,d from table_add_int_string_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+insert_num a c d
+101 -2147483648 NULL NULL
+102 2147483647 NULL NULL
+103 NULL NULL NULL
+104 203332 NULL NULL
+105 -999992 NULL NULL
+111 80000 80000 filler
+PREHOOK: query: select insert_num,a,d from table_add_int_string_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,a,d from table_add_int_string_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+insert_num a d
+101 -2147483648 NULL
+102 2147483647 NULL
+103 NULL NULL
+104 203332 NULL
+105 -999992 NULL
+111 80000 filler
+PREHOOK: query: select insert_num,c from table_add_int_string_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,c from table_add_int_string_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+insert_num c
+101 NULL
+102 NULL
+103 NULL
+104 NULL
+105 NULL
+111 80000
+PREHOOK: query: select insert_num,d from table_add_int_string_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,d from table_add_int_string_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+insert_num d
+101 NULL
+102 NULL
+103 NULL
+104 NULL
+105 NULL
+111 filler
+PREHOOK: query: drop table table_add_int_string_permute_select
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@table_add_int_string_permute_select
+PREHOOK: Output: default@table_add_int_string_permute_select
+POSTHOOK: query: drop table table_add_int_string_permute_select
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@table_add_int_string_permute_select
+POSTHOOK: Output: default@table_add_int_string_permute_select
+PREHOOK: query: CREATE TABLE table_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table_change_string_group_double
+POSTHOOK: query: CREATE TABLE table_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table_change_string_group_double
+PREHOOK: query: insert into table table_change_string_group_double SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data
+PREHOOK: type: QUERY
+PREHOOK: Input: default@schema_evolution_data
+PREHOOK: Output: default@table_change_string_group_double
+POSTHOOK: query: insert into table table_change_string_group_double SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@schema_evolution_data
+POSTHOOK: Output: default@table_change_string_group_double
+POSTHOOK: Lineage: table_change_string_group_double.b SIMPLE []
+POSTHOOK: Lineage: table_change_string_group_double.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ]
+POSTHOOK: Lineage: table_change_string_group_double.c2 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ]
+POSTHOOK: Lineage: table_change_string_group_double.c3 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ]
+POSTHOOK: Lineage: table_change_string_group_double.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ]
+_col0 _col1 _col2 _col3 _col4
+PREHOOK: query: alter table table_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@table_change_string_group_double
+PREHOOK: Output: default@table_change_string_group_double
+POSTHOOK: query: alter table table_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@table_change_string_group_double
+POSTHOOK: Output: default@table_change_string_group_double
+PREHOOK: query: insert into table table_change_string_group_double VALUES (111, 789.321, 789.321, 789.321, 'new')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@table_change_string_group_double
+POSTHOOK: query: insert into table table_change_string_group_double VALUES (111, 789.321, 789.321, 789.321, 'new')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@table_change_string_group_double
+POSTHOOK: Lineage: table_change_string_group_double.b SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_double.c1 SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_double.c2 SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_double.c3 SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_double.insert_num SCRIPT []
+_col0 _col1 _col2 _col3 _col4
+PREHOOK: query: explain vectorization only detail
+select insert_num,c1,c2,c3,b from table_change_string_group_double
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization only detail
+select insert_num,c1,c2,c3,b from table_change_string_group_double
+POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+ enabled: true
+ enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan Vectorization:
+ native: true
+ vectorizationSchemaColumns: [0:insert_num:int, 1:c1:double, 2:c2:double, 3:c3:double, 4:b:string, 5:ROW__ID:struct]
+ Select Vectorization:
+ className: VectorSelectOperator
+ native: true
+ projectedOutputColumnNums: [0, 1, 2, 3, 4]
+ File Sink Vectorization:
+ className: VectorFileSinkOperator
+ native: false
+ Execution mode: vectorized, llap
+ LLAP IO: may be used (ACID table)
+ Map Vectorization:
+ enabled: true
+ enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+ inputFormatFeatureSupport: []
+ featureSupportInUse: []
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
+ allNative: false
+ usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
+ vectorized: true
+ rowBatchContext:
+ dataColumnCount: 5
+ includeColumns: [0, 1, 2, 3, 4]
+ dataColumns: insert_num:int, c1:double, c2:double, c3:double, b:string
+ partitionColumnCount: 0
+ scratchColumnTypeNames: []
+
+ Stage: Stage-0
+ Fetch Operator
+
+PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_double
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_change_string_group_double
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_double
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_change_string_group_double
+#### A masked pattern was here ####
+insert_num c1 c2 c3 b
+101 1.7976931348623157E308 1.7976931348623157E308 1.7976931348623157E308 original
+102 -1.7976931348623157E308 -1.7976931348623157E308 -1.7976931348623157E308 original
+103 NULL NULL NULL original
+104 30.774 30.774 30.774 original
+105 46114.28 46114.28 46114.28 original
+111 789.321 789.321 789.321 new
+PREHOOK: query: drop table table_change_string_group_double
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@table_change_string_group_double
+PREHOOK: Output: default@table_change_string_group_double
+POSTHOOK: query: drop table table_change_string_group_double
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@table_change_string_group_double
+POSTHOOK: Output: default@table_change_string_group_double
+PREHOOK: query: CREATE TABLE table_change_date_group_string_group_date_group(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table_change_date_group_string_group_date_group
+POSTHOOK: query: CREATE TABLE table_change_date_group_string_group_date_group(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table_change_date_group_string_group_date_group
+PREHOOK: query: insert into table table_change_date_group_string_group_date_group SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data
+PREHOOK: type: QUERY
+PREHOOK: Input: default@schema_evolution_data
+PREHOOK: Output: default@table_change_date_group_string_group_date_group
+POSTHOOK: query: insert into table table_change_date_group_string_group_date_group SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@schema_evolution_data
+POSTHOOK: Output: default@table_change_date_group_string_group_date_group
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.b SIMPLE []
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ]
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ]
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ]
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ]
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ]
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ]
+insert_num date1 date1 date1 date1 date1 timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 _c11
+PREHOOK: query: alter table table_change_date_group_string_group_date_group replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@table_change_date_group_string_group_date_group
+PREHOOK: Output: default@table_change_date_group_string_group_date_group
+POSTHOOK: query: alter table table_change_date_group_string_group_date_group replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@table_change_date_group_string_group_date_group
+POSTHOOK: Output: default@table_change_date_group_string_group_date_group
+PREHOOK: query: insert into table table_change_date_group_string_group_date_group VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@table_change_date_group_string_group_date_group
+POSTHOOK: query: insert into table table_change_date_group_string_group_date_group VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@table_change_date_group_string_group_date_group
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.b SCRIPT []
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c1 SCRIPT []
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c10 SCRIPT []
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c2 SCRIPT []
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c3 SCRIPT []
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c4 SCRIPT []
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c5 SCRIPT []
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c6 SCRIPT []
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c7 SCRIPT []
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c8 SCRIPT []
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c9 SCRIPT []
+POSTHOOK: Lineage: table_change_date_group_string_group_date_group.insert_num SCRIPT []
+_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11
+PREHOOK: query: explain vectorization only detail
+select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_date_group_string_group_date_group
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization only detail
+select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_date_group_string_group_date_group
+POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+ enabled: true
+ enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan Vectorization:
+ native: true
+ vectorizationSchemaColumns: [0:insert_num:int, 1:c1:string, 2:c2:char(50), 3:c3:char(15), 4:c4:varchar(50), 5:c5:varchar(15), 6:c6:string, 7:c7:char(50), 8:c8:char(15), 9:c9:varchar(50), 10:c10:varchar(15), 11:b:string, 12:ROW__ID:struct]
+ Select Vectorization:
+ className: VectorSelectOperator
+ native: true
+ projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
+ File Sink Vectorization:
+ className: VectorFileSinkOperator
+ native: false
+ Execution mode: vectorized, llap
+ LLAP IO: may be used (ACID table)
+ Map Vectorization:
+ enabled: true
+ enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+ inputFormatFeatureSupport: []
+ featureSupportInUse: []
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
+ allNative: false
+ usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
+ vectorized: true
+ rowBatchContext:
+ dataColumnCount: 12
+ includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
+ dataColumns: insert_num:int, c1:string, c2:char(50), c3:char(15), c4:varchar(50), c5:varchar(15), c6:string, c7:char(50), c8:char(15), c9:varchar(50), c10:varchar(15), b:string
+ partitionColumnCount: 0
+ scratchColumnTypeNames: []
+
+ Stage: Stage-0
+ Fetch Operator
+
+PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_date_group_string_group_date_group
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_change_date_group_string_group_date_group
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_date_group_string_group_date_group
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_change_date_group_string_group_date_group
+#### A masked pattern was here ####
+insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b
+101 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.970117179 6229-06-28 02:5 6229-06-28 02:54:28.970117179 6229-06-28 02:5 original
+102 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:3 5966-07-09 03:30:50.597 5966-07-09 03:3 original
+103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
+104 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-02 06:34:14.0 1978-08-02 06:34:14.0 1978-08-02 06:3 1978-08-02 06:34:14.0 1978-08-02 06:3 original
+105 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036854 1991-01-06 16:2 1991-01-06 16:20:39.72036854 1991-01-06 16:2 original
+111 filler filler filler filler filler filler filler filler filler filler new
+PREHOOK: query: drop table table_change_date_group_string_group_date_group
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@table_change_date_group_string_group_date_group
+PREHOOK: Output: default@table_change_date_group_string_group_date_group
+POSTHOOK: query: drop table table_change_date_group_string_group_date_group
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@table_change_date_group_string_group_date_group
+POSTHOOK: Output: default@table_change_date_group_string_group_date_group
+PREHOOK: query: CREATE TABLE table_change_numeric_group_string_group_multi_ints_string_group(insert_num int,
+ c1 tinyint, c2 smallint, c3 int, c4 bigint,
+ c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint,
+ c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint,
+ b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: query: CREATE TABLE table_change_numeric_group_string_group_multi_ints_string_group(insert_num int,
+ c1 tinyint, c2 smallint, c3 int, c4 bigint,
+ c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint,
+ c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint,
+ b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: query: insert into table table_change_numeric_group_string_group_multi_ints_string_group SELECT insert_num,
+ tinyint1, smallint1, int1, bigint1,
+ tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1,
+ tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1,
+ 'original' FROM schema_evolution_data
+PREHOOK: type: QUERY
+PREHOOK: Input: default@schema_evolution_data
+PREHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: query: insert into table table_change_numeric_group_string_group_multi_ints_string_group SELECT insert_num,
+ tinyint1, smallint1, int1, bigint1,
+ tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1,
+ tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1,
+ 'original' FROM schema_evolution_data
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@schema_evolution_data
+POSTHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.b SIMPLE []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c16 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c17 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c18 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c19 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c20 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ]
+insert_num tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 _c21
+PREHOOK: query: explain vectorization only detail
+select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization only detail
+select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+ enabled: true
+ enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan Vectorization:
+ native: true
+ vectorizationSchemaColumns: [0:insert_num:int, 1:c1:tinyint, 2:c2:smallint, 3:c3:int, 4:c4:bigint, 5:c5:tinyint, 6:c6:smallint, 7:c7:int, 8:c8:bigint, 9:c9:tinyint, 10:c10:smallint, 11:c11:int, 12:c12:bigint, 13:c13:tinyint, 14:c14:smallint, 15:c15:int, 16:c16:bigint, 17:c17:tinyint, 18:c18:smallint, 19:c19:int, 20:c20:bigint, 21:b:string, 22:ROW__ID:struct]
+ Select Vectorization:
+ className: VectorSelectOperator
+ native: true
+ projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21]
+ File Sink Vectorization:
+ className: VectorFileSinkOperator
+ native: false
+ Execution mode: vectorized, llap
+ LLAP IO: may be used (ACID table)
+ Map Vectorization:
+ enabled: true
+ enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+ inputFormatFeatureSupport: []
+ featureSupportInUse: []
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
+ allNative: false
+ usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
+ vectorized: true
+ rowBatchContext:
+ dataColumnCount: 22
+ includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21]
+ dataColumns: insert_num:int, c1:tinyint, c2:smallint, c3:int, c4:bigint, c5:tinyint, c6:smallint, c7:int, c8:bigint, c9:tinyint, c10:smallint, c11:int, c12:bigint, c13:tinyint, c14:smallint, c15:int, c16:bigint, c17:tinyint, c18:smallint, c19:int, c20:bigint, b:string
+ partitionColumnCount: 0
+ scratchColumnTypeNames: []
+
+ Stage: Stage-0
+ Fetch Operator
+
+PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group
+#### A masked pattern was here ####
+insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 b
+101 -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL original
+102 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 original
+103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
+104 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 203332 888888857923222 original
+105 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 original
+PREHOOK: query: alter table table_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int,
+ c1 STRING, c2 STRING, c3 STRING, c4 STRING,
+ c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5),
+ c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5),
+ b STRING)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: query: alter table table_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int,
+ c1 STRING, c2 STRING, c3 STRING, c4 STRING,
+ c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5),
+ c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5),
+ b STRING)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: query: insert into table table_change_numeric_group_string_group_multi_ints_string_group VALUES (111,
+ 'filler', 'filler', 'filler', 'filler',
+ 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler',
+ 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler',
+ 'new')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: query: insert into table table_change_numeric_group_string_group_multi_ints_string_group VALUES (111,
+ 'filler', 'filler', 'filler', 'filler',
+ 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler',
+ 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler',
+ 'new')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.b SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c1 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c10 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c11 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c12 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c13 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c14 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c15 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c16 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c17 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c18 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c19 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c2 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c20 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c3 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c4 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c5 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c6 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c7 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c8 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c9 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.insert_num SCRIPT []
+_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 _col17 _col18 _col19 _col20 _col21
+PREHOOK: query: explain vectorization only detail
+select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization only detail
+select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+ enabled: true
+ enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan Vectorization:
+ native: true
+ vectorizationSchemaColumns: [0:insert_num:int, 1:c1:string, 2:c2:string, 3:c3:string, 4:c4:string, 5:c5:char(50), 6:c6:char(50), 7:c7:char(50), 8:c8:char(50), 9:c9:char(5), 10:c10:char(5), 11:c11:char(5), 12:c12:char(5), 13:c13:varchar(50), 14:c14:varchar(50), 15:c15:varchar(50), 16:c16:varchar(50), 17:c17:varchar(5), 18:c18:varchar(5), 19:c19:varchar(5), 20:c20:varchar(5), 21:b:string, 22:ROW__ID:struct]
+ Select Vectorization:
+ className: VectorSelectOperator
+ native: true
+ projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21]
+ File Sink Vectorization:
+ className: VectorFileSinkOperator
+ native: false
+ Execution mode: vectorized, llap
+ LLAP IO: may be used (ACID table)
+ Map Vectorization:
+ enabled: true
+ enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+ inputFormatFeatureSupport: []
+ featureSupportInUse: []
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
+ allNative: false
+ usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
+ vectorized: true
+ rowBatchContext:
+ dataColumnCount: 22
+ includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21]
+ dataColumns: insert_num:int, c1:string, c2:string, c3:string, c4:string, c5:char(50), c6:char(50), c7:char(50), c8:char(50), c9:char(5), c10:char(5), c11:char(5), c12:char(5), c13:varchar(50), c14:varchar(50), c15:varchar(50), c16:varchar(50), c17:varchar(5), c18:varchar(5), c19:varchar(5), c20:varchar(5), b:string
+ partitionColumnCount: 0
+ scratchColumnTypeNames: []
+
+ Stage: Stage-0
+ Fetch Operator
+
+PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group
+#### A masked pattern was here ####
+insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 b
+101 -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147 NULL -128 NULL -2147483648 NULL -128 NULL -2147 NULL original
+102 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 21474 92233 127 32767 2147483647 9223372036854775807 127 32767 21474 92233 original
+103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
+104 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 20333 88888 23 834 203332 888888857923222 23 834 20333 88888 original
+105 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -2830 -9999 -2222 -99 -28300 -999992 -222282153733 -99 -2830 -9999 -2222 original
+111 filler filler filler filler filler filler filler filler fille fille fille fille filler filler filler filler fille fille fille fille new
+PREHOOK: query: drop table table_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: query: drop table table_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: query: CREATE TABLE table_change_numeric_group_string_group_floating_string_group(insert_num int,
+ c1 decimal(38,18), c2 float, c3 double,
+ c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double,
+ c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double,
+ b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group
+POSTHOOK: query: CREATE TABLE table_change_numeric_group_string_group_floating_string_group(insert_num int,
+ c1 decimal(38,18), c2 float, c3 double,
+ c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double,
+ c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double,
+ b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group
+PREHOOK: query: insert into table table_change_numeric_group_string_group_floating_string_group SELECT insert_num,
+ decimal1, float1, double1,
+ decimal1, float1, double1, decimal1, float1, double1,
+ decimal1, float1, double1, decimal1, float1, double1,
+ 'original' FROM schema_evolution_data
+PREHOOK: type: QUERY
+PREHOOK: Input: default@schema_evolution_data
+PREHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group
+POSTHOOK: query: insert into table table_change_numeric_group_string_group_floating_string_group SELECT insert_num,
+ decimal1, float1, double1,
+ decimal1, float1, double1, decimal1, float1, double1,
+ decimal1, float1, double1, decimal1, float1, double1,
+ 'original' FROM schema_evolution_data
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@schema_evolution_data
+POSTHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.b SIMPLE []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ]
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ]
+insert_num decimal1 float1 double1 decimal1 float1 double1 decimal1 float1 double1 decimal1 float1 double1 decimal1 float1 double1 _c16
+PREHOOK: query: explain vectorization only detail
+select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization only detail
+select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group
+POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+ enabled: true
+ enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan Vectorization:
+ native: true
+ vectorizationSchemaColumns: [0:insert_num:int, 1:c1:decimal(38,18), 2:c2:float, 3:c3:double, 4:c4:decimal(38,18), 5:c5:float, 6:c6:double, 7:c7:decimal(38,18), 8:c8:float, 9:c9:double, 10:c10:decimal(38,18), 11:c11:float, 12:c12:double, 13:c13:decimal(38,18), 14:c14:float, 15:c15:double, 16:b:string, 17:ROW__ID:struct]
+ Select Vectorization:
+ className: VectorSelectOperator
+ native: true
+ projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]
+ File Sink Vectorization:
+ className: VectorFileSinkOperator
+ native: false
+ Execution mode: vectorized, llap
+ LLAP IO: may be used (ACID table)
+ Map Vectorization:
+ enabled: true
+ enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+ inputFormatFeatureSupport: []
+ featureSupportInUse: []
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
+ allNative: false
+ usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
+ vectorized: true
+ rowBatchContext:
+ dataColumnCount: 17
+ includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]
+ dataColumns: insert_num:int, c1:decimal(38,18), c2:float, c3:double, c4:decimal(38,18), c5:float, c6:double, c7:decimal(38,18), c8:float, c9:double, c10:decimal(38,18), c11:float, c12:double, c13:decimal(38,18), c14:float, c15:double, b:string
+ partitionColumnCount: 0
+ scratchColumnTypeNames: []
+
+ Stage: Stage-0
+ Fetch Operator
+
+PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group
+#### A masked pattern was here ####
+insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b
+101 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 original
+102 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 original
+103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
+104 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 original
+105 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 original
+PREHOOK: query: alter table table_change_numeric_group_string_group_floating_string_group replace columns (insert_num int,
+ c1 STRING, c2 STRING, c3 STRING,
+ c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7),
+ c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7),
+ b STRING)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group
+PREHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group
+POSTHOOK: query: alter table table_change_numeric_group_string_group_floating_string_group replace columns (insert_num int,
+ c1 STRING, c2 STRING, c3 STRING,
+ c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7),
+ c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7),
+ b STRING)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group
+POSTHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group
+PREHOOK: query: insert into table table_change_numeric_group_string_group_floating_string_group VALUES (111,
+ 'filler', 'filler', 'filler',
+ 'filler', 'filler', 'filler', 'filler', 'filler', 'filler',
+ 'filler', 'filler', 'filler', 'filler', 'filler', 'filler',
+ 'new')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group
+POSTHOOK: query: insert into table table_change_numeric_group_string_group_floating_string_group VALUES (111,
+ 'filler', 'filler', 'filler',
+ 'filler', 'filler', 'filler', 'filler', 'filler', 'filler',
+ 'filler', 'filler', 'filler', 'filler', 'filler', 'filler',
+ 'new')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.b SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c1 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c10 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c11 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c12 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c13 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c14 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c15 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c2 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c3 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c4 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c5 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c6 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c7 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c8 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c9 SCRIPT []
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.insert_num SCRIPT []
+_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16
+PREHOOK: query: explain vectorization only detail
+select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization only detail
+select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group
+POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+ enabled: true
+ enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan Vectorization:
+ native: true
+ vectorizationSchemaColumns: [0:insert_num:int, 1:c1:string, 2:c2:string, 3:c3:string, 4:c4:char(50), 5:c5:char(50), 6:c6:char(50), 7:c7:char(7), 8:c8:char(7), 9:c9:char(7), 10:c10:varchar(50), 11:c11:varchar(50), 12:c12:varchar(50), 13:c13:varchar(7), 14:c14:varchar(7), 15:c15:varchar(7), 16:b:string, 17:ROW__ID:struct]
+ Select Vectorization:
+ className: VectorSelectOperator
+ native: true
+ projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]
+ File Sink Vectorization:
+ className: VectorFileSinkOperator
+ native: false
+ Execution mode: vectorized, llap
+ LLAP IO: may be used (ACID table)
+ Map Vectorization:
+ enabled: true
+ enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+ inputFormatFeatureSupport: []
+ featureSupportInUse: []
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
+ allNative: false
+ usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
+ vectorized: true
+ rowBatchContext:
+ dataColumnCount: 17
+ includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]
+ dataColumns: insert_num:int, c1:string, c2:string, c3:string, c4:char(50), c5:char(50), c6:char(50), c7:char(7), c8:char(7), c9:char(7), c10:varchar(50), c11:varchar(50), c12:varchar(50), c13:varchar(7), c14:varchar(7), c15:varchar(7), b:string
+ partitionColumnCount: 0
+ scratchColumnTypeNames: []
+
+ Stage: Stage-0
+ Fetch Operator
+
+PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group
+#### A masked pattern was here ####
+insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b
+101 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 original
+102 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 original
+103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
+104 66475.561431 -100.35978 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 original
+105 9250340.75 NULL 46114.28 9250340.75 NULL 46114.28 9250340 NULL 46114.2 9250340.75 NULL 46114.28 9250340 NULL 46114.2 original
+111 filler filler filler filler filler filler filler filler filler filler filler filler filler filler filler new
+PREHOOK: query: drop table table_change_numeric_group_string_group_floating_string_group
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group
+PREHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group
+POSTHOOK: query: drop table table_change_numeric_group_string_group_floating_string_group
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group
+POSTHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group
+PREHOOK: query: CREATE TABLE table_change_string_group_string_group_string(insert_num int,
+ c1 string, c2 string, c3 string, c4 string,
+ c5 CHAR(50), c6 CHAR(50), c7 CHAR(50),
+ c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table_change_string_group_string_group_string
+POSTHOOK: query: CREATE TABLE table_change_string_group_string_group_string(insert_num int,
+ c1 string, c2 string, c3 string, c4 string,
+ c5 CHAR(50), c6 CHAR(50), c7 CHAR(50),
+ c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table_change_string_group_string_group_string
+PREHOOK: query: insert into table table_change_string_group_string_group_string SELECT insert_num,
+ string2, string2, string2, string2,
+ string2, string2, string2,
+ string2, string2, string2,
+ 'original' FROM schema_evolution_data
+PREHOOK: type: QUERY
+PREHOOK: Input: default@schema_evolution_data
+PREHOOK: Output: default@table_change_string_group_string_group_string
+POSTHOOK: query: insert into table table_change_string_group_string_group_string SELECT insert_num,
+ string2, string2, string2, string2,
+ string2, string2, string2,
+ string2, string2, string2,
+ 'original' FROM schema_evolution_data
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@schema_evolution_data
+POSTHOOK: Output: default@table_change_string_group_string_group_string
+POSTHOOK: Lineage: table_change_string_group_string_group_string.b SIMPLE []
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ]
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c10 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ]
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ]
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ]
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ]
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c5 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ]
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c6 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ]
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c7 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ]
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c8 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ]
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c9 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ]
+POSTHOOK: Lineage: table_change_string_group_string_group_string.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ]
+_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11
+PREHOOK: query: explain vectorization only detail
+select insert_num,c1,c2,c3,c4,b from table_change_string_group_string_group_string
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization only detail
+select insert_num,c1,c2,c3,c4,b from table_change_string_group_string_group_string
+POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+ enabled: true
+ enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan Vectorization:
+ native: true
+ vectorizationSchemaColumns: [0:insert_num:int, 1:c1:string, 2:c2:string, 3:c3:string, 4:c4:string, 5:c5:char(50), 6:c6:char(50), 7:c7:char(50), 8:c8:varchar(50), 9:c9:varchar(50), 10:c10:varchar(50), 11:b:string, 12:ROW__ID:struct]
+ Select Vectorization:
+ className: VectorSelectOperator
+ native: true
+ projectedOutputColumnNums: [0, 1, 2, 3, 4, 11]
+ File Sink Vectorization:
+ className: VectorFileSinkOperator
+ native: false
+ Execution mode: vectorized, llap
+ LLAP IO: may be used (ACID table)
+ Map Vectorization:
+ enabled: true
+ enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+ inputFormatFeatureSupport: []
+ featureSupportInUse: []
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
+ allNative: false
+ usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
+ vectorized: true
+ rowBatchContext:
+ dataColumnCount: 12
+ includeColumns: [0, 1, 2, 3, 4, 11]
+ dataColumns: insert_num:int, c1:string, c2:string, c3:string, c4:string, c5:char(50), c6:char(50), c7:char(50), c8:varchar(50), c9:varchar(50), c10:varchar(50), b:string
+ partitionColumnCount: 0
+ scratchColumnTypeNames: []
+
+ Stage: Stage-0
+ Fetch Operator
+
+PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_string_group_string_group_string
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_change_string_group_string_group_string
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_string_group_string_group_string
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_change_string_group_string_group_string
+#### A masked pattern was here ####
+insert_num c1 c2 c3 c4 b
+101 freckled freckled freckled freckled original
+102 ox ox ox ox original
+103 original
+104 I cooked I cooked I cooked I cooked original
+105 200 200 200 200 original
+PREHOOK: query: alter table table_change_string_group_string_group_string replace columns (insert_num int,
+ c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9),
+ c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING,
+ c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@table_change_string_group_string_group_string
+PREHOOK: Output: default@table_change_string_group_string_group_string
+POSTHOOK: query: alter table table_change_string_group_string_group_string replace columns (insert_num int,
+ c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9),
+ c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING,
+ c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@table_change_string_group_string_group_string
+POSTHOOK: Output: default@table_change_string_group_string_group_string
+PREHOOK: query: insert into table table_change_string_group_string_group_string VALUES (111,
+ 'filler', 'filler', 'filler', 'filler',
+ 'filler', 'filler', 'filler',
+ 'filler', 'filler', 'filler',
+ 'new')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@table_change_string_group_string_group_string
+POSTHOOK: query: insert into table table_change_string_group_string_group_string VALUES (111,
+ 'filler', 'filler', 'filler', 'filler',
+ 'filler', 'filler', 'filler',
+ 'filler', 'filler', 'filler',
+ 'new')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@table_change_string_group_string_group_string
+POSTHOOK: Lineage: table_change_string_group_string_group_string.b SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c1 SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c10 SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c2 SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c3 SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c4 SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c5 SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c6 SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c7 SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c8 SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c9 SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_string_group_string.insert_num SCRIPT []
+_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11
+PREHOOK: query: explain vectorization only detail
+select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_string_group_string_group_string
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization only detail
+select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_string_group_string_group_string
+POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+ enabled: true
+ enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan Vectorization:
+ native: true
+ vectorizationSchemaColumns: [0:insert_num:int, 1:c1:char(50), 2:c2:char(9), 3:c3:varchar(50), 4:c4:char(9), 5:c5:varchar(50), 6:c6:varchar(9), 7:c7:string, 8:c8:char(50), 9:c9:char(9), 10:c10:string, 11:b:string, 12:ROW__ID:struct]
+ Select Vectorization:
+ className: VectorSelectOperator
+ native: true
+ projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
+ File Sink Vectorization:
+ className: VectorFileSinkOperator
+ native: false
+ Execution mode: vectorized, llap
+ LLAP IO: may be used (ACID table)
+ Map
Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 12 + includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] + dataColumns: insert_num:int, c1:char(50), c2:char(9), c3:varchar(50), c4:char(9), c5:varchar(50), c6:varchar(9), c7:string, c8:char(50), c9:char(9), c10:string, b:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + +PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_string_group_string_group_string +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_string_group_string_group_string +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_string_group_string_group_string +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_string_group_string_group_string +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b +101 freckled freckled freckled freckled freckled freckled freckled freckled freckled freckled original +102 ox ox ox ox ox ox ox ox ox ox original +103 original +104 I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked original +105 200 200 200 200 200 200 200 200 200 200 original +111 filler filler filler filler filler filler filler filler filler filler new +PREHOOK: query: drop table table_change_string_group_string_group_string +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table_change_string_group_string_group_string +PREHOOK: Output: default@table_change_string_group_string_group_string +POSTHOOK: query: drop table table_change_string_group_string_group_string +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table_change_string_group_string_group_string +POSTHOOK: Output: default@table_change_string_group_string_group_string +PREHOOK: query: CREATE TABLE table_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int, + c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, + c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint, + c12 int, c13 int, c14 int, c15 int, + c16 bigint, c17 bigint, c18 bigint, + b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: CREATE TABLE table_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int, + c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, + c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint, + c12 int, c13 int, c14 int, c15 int, + c16 bigint, c17 bigint, c18 bigint, + b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: query: insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, 
smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.b SIMPLE [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c16 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c17 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c18 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, 
type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 smallint1 smallint1 smallint1 smallint1 smallint1 int1 int1 int1 int1 bigint1 bigint1 bigint1 _c19 +PREHOOK: query: explain vectorization only detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization only detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Vertices: + Map 1 + Map Operator Tree: + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:c1:tinyint, 2:c2:tinyint, 3:c3:tinyint, 4:c4:tinyint, 5:c5:tinyint, 6:c6:tinyint, 7:c7:smallint, 8:c8:smallint, 9:c9:smallint, 10:c10:smallint, 11:c11:smallint, 12:c12:int, 13:c13:int, 14:c14:int, 15:c15:int, 16:c16:bigint, 17:c17:bigint, 18:c18:bigint, 19:b:string, 20:ROW__ID:struct] + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19] + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Execution mode: vectorized, llap + LLAP IO: may be used (ACID table) + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 20 + includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19] + dataColumns: insert_num:int, c1:tinyint, c2:tinyint, c3:tinyint, c4:tinyint, c5:tinyint, c6:tinyint, c7:smallint, c8:smallint, c9:smallint, c10:smallint, c11:smallint, c12:int, c13:int, c14:int, c15:int, c16:bigint, c17:bigint, c18:bigint, b:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + +PREHOOK: query: select 
insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 b +101 -128 -128 -128 -128 -128 -128 NULL NULL NULL NULL NULL -2147483648 -2147483648 -2147483648 -2147483648 NULL NULL NULL original +102 127 127 127 127 127 127 32767 32767 32767 32767 32767 2147483647 2147483647 2147483647 2147483647 9223372036854775807 9223372036854775807 9223372036854775807 original +103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 23 23 23 23 23 23 834 834 834 834 834 203332 203332 203332 203332 888888857923222 888888857923222 888888857923222 original +105 -99 -99 -99 -99 -99 -99 -28300 -28300 -28300 -28300 -28300 -999992 -999992 -999992 -999992 -222282153733 -222282153733 -222282153733 original +PREHOOK: query: alter table table_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: alter table table_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: query: insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.b SCRIPT [] +POSTHOOK: 
Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c1 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c10 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c11 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c12 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c13 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c14 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c15 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c16 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c17 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c18 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c2 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c3 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c4 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c5 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c6 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c7 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c8 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c9 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 _col17 _col18 _col19 +PREHOOK: query: explain vectorization only detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization only detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Vertices: + Map 1 + Map Operator Tree: + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:c1:smallint, 2:c2:int, 3:c3:bigint, 4:c4:decimal(38,18), 5:c5:float, 6:c6:double, 7:c7:int, 8:c8:bigint, 9:c9:decimal(38,18), 10:c10:float, 11:c11:double, 12:c12:bigint, 13:c13:decimal(38,18), 14:c14:float, 15:c15:double, 16:c16:decimal(38,18), 17:c17:float, 18:c18:double, 19:b:string, 20:ROW__ID:struct] + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19] + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Execution mode: vectorized, llap + LLAP IO: may be used (ACID table) + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + 
inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 20 + includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19] + dataColumns: insert_num:int, c1:smallint, c2:int, c3:bigint, c4:decimal(38,18), c5:float, c6:double, c7:int, c8:bigint, c9:decimal(38,18), c10:float, c11:double, c12:bigint, c13:decimal(38,18), c14:float, c15:double, c16:decimal(38,18), c17:float, c18:double, b:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + +PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 b +101 -128 -128 -128 -128.000000000000000000 -128.0 -128.0 NULL NULL NULL NULL NULL -2147483648 -2147483648.000000000000000000 -2.14748365E9 -2.147483648E9 NULL NULL NULL original +102 127 127 127 127.000000000000000000 127.0 127.0 32767 32767 32767.000000000000000000 32767.0 32767.0 2147483647 2147483647.000000000000000000 2.14748365E9 2.147483647E9 9223372036854775807.000000000000000000 9.223372E18 9.223372036854776E18 original +103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 23 23 23 23.000000000000000000 23.0 23.0 834 834 834.000000000000000000 834.0 834.0 203332 203332.000000000000000000 203332.0 203332.0 888888857923222.000000000000000000 8.8888885E14 8.88888857923222E14 original +105 -99 -99 -99 -99.000000000000000000 -99.0 -99.0 -28300 -28300 -28300.000000000000000000 -28300.0 -28300.0 -999992 -999992.000000000000000000 -999992.0 -999992.0 -222282153733.000000000000000000 -2.22282154E11 -2.22282153733E11 original +111 7000 80000 90000000 1234.567800000000000000 9876.543 789.321 80000 90000000 1234.567800000000000000 9876.543 789.321 90000000 1234.567800000000000000 9876.543 789.321 1234.567800000000000000 9876.543 789.321 new +PREHOOK: query: drop table table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: drop table table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: query: CREATE TABLE table_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default 
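The tinyint-to-bigint golden output above closes the first promotion chain: rows written under TINYINT/SMALLINT/INT/BIGINT read back correctly through the widened SMALLINT/INT/BIGINT/DECIMAL(38,18)/FLOAT/DOUBLE schema, and cells that were already NULL before the ALTER stay NULL after it. As a minimal sketch of the pattern these golden files exercise — table and column names here are illustrative, not taken from the patch:

-- Write under a narrow schema, widen the column in place, then read both
-- old and new rows through the evolved schema (existing ORC data is
-- converted to the wider type at read time).
CREATE TABLE evol_demo (insert_num INT, c1 TINYINT, b STRING)
CLUSTERED BY (c1) INTO 2 BUCKETS
STORED AS ORC TBLPROPERTIES ('transactional'='true');

INSERT INTO evol_demo VALUES (101, 23, 'original');

-- Promote TINYINT to BIGINT without rewriting the stored files.
ALTER TABLE evol_demo REPLACE COLUMNS (insert_num INT, c1 BIGINT, b STRING);

INSERT INTO evol_demo VALUES (111, 90000000, 'new');

SELECT insert_num, c1, b FROM evol_demo;  -- both rows, c1 now BIGINT

The decimal-to-float block that follows checks the same mechanics at the top of the numeric group, where DECIMAL(38,18) values read back through FLOAT with the expected precision loss (66475.561431 becomes 66475.56, and the inserted 1234.5678 reads as 1234.5677), as its result sets below show.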
+PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: CREATE TABLE table_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING) clustered by (c1) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: query: insert into table table_change_lower_to_higher_numeric_group_decimal_to_float SELECT insert_num, + decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: insert into table table_change_lower_to_higher_numeric_group_decimal_to_float SELECT insert_num, + decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.b SIMPLE [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num decimal1 decimal1 float1 _c4 +PREHOOK: query: explain vectorization only detail +select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization only detail +select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Vertices: + Map 1 + Map Operator Tree: + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:c1:decimal(38,18), 2:c2:decimal(38,18), 3:c3:float, 4:b:string, 5:ROW__ID:struct] + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 3, 4] + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Execution mode: vectorized, llap + LLAP IO: may be used (ACID table) + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 5 + includeColumns: [0, 1, 2, 3, 4] + dataColumns: insert_num:int, c1:decimal(38,18), c2:decimal(38,18), c3:float, b:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + +PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +#### A masked pattern was here #### +insert_num c1 c2 c3 b +101 99999999999999999999.999999999999999999 99999999999999999999.999999999999999999 Infinity original +102 -99999999999999999999.999999999999999999 -99999999999999999999.999999999999999999 -Infinity original +103 NULL NULL NULL original +104 66475.561431000000000000 66475.561431000000000000 -100.35978 original +105 9250340.750000000000000000 9250340.750000000000000000 NULL original +PREHOOK: query: alter table table_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: alter table table_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: query: insert into table table_change_lower_to_higher_numeric_group_decimal_to_float VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: insert into table table_change_lower_to_higher_numeric_group_decimal_to_float VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.b SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c1 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c2 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c3 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 +PREHOOK: query: explain vectorization only detail +select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization only detail +select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS 
true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Vertices: + Map 1 + Map Operator Tree: + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:c1:float, 2:c2:double, 3:c3:double, 4:b:string, 5:ROW__ID:struct] + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 3, 4] + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Execution mode: vectorized, llap + LLAP IO: may be used (ACID table) + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 5 + includeColumns: [0, 1, 2, 3, 4] + dataColumns: insert_num:int, c1:float, c2:double, c3:double, b:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + +PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +#### A masked pattern was here #### +insert_num c1 c2 c3 b +101 1.0E20 1.0E20 Infinity original +102 -1.0E20 -1.0E20 -Infinity original +103 NULL NULL NULL original +104 66475.56 66475.561431 -100.35978 original +105 9250341.0 9250340.75 NULL original +111 1234.5677 9876.543 1234.5678 new +PREHOOK: query: drop table table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: drop table table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float diff --git ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table_update_llap_io.q.out ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table_update_llap_io.q.out new file mode 100644 index 0000000..2292dcb --- /dev/null +++ ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table_update_llap_io.q.out @@ -0,0 +1,449 @@ +PREHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: 
database:default +PREHOOK: Output: default@schema_evolution_data +POSTHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@schema_evolution_data +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@schema_evolution_data +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@schema_evolution_data +PREHOOK: query: CREATE TABLE schema_evolution_data_2(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@schema_evolution_data_2 +POSTHOOK: query: CREATE TABLE schema_evolution_data_2(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@schema_evolution_data_2 +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data_2.txt' overwrite into table schema_evolution_data_2 +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@schema_evolution_data_2 +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data_2.txt' overwrite into table schema_evolution_data_2 +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@schema_evolution_data_2 +PREHOOK: query: CREATE TABLE table5(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table5 +POSTHOOK: query: CREATE TABLE table5(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: 
Output: default@table5 +PREHOOK: query: insert into table table5 SELECT insert_num, int1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table5 +POSTHOOK: query: insert into table table5 SELECT insert_num, int1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table5 +POSTHOOK: Lineage: table5.a SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table5.b SIMPLE [] +POSTHOOK: Lineage: table5.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 +PREHOOK: query: alter table table5 add columns(c int, d string) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@table5 +PREHOOK: Output: default@table5 +POSTHOOK: query: alter table table5 add columns(c int, d string) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@table5 +POSTHOOK: Output: default@table5 +PREHOOK: query: insert into table table5 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@table5 +POSTHOOK: query: insert into table table5 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@table5 +POSTHOOK: Lineage: table5.a SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table5.b SIMPLE [] +POSTHOOK: Lineage: table5.c SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table5.d SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:string1, type:string, comment:null), ] +POSTHOOK: Lineage: table5.insert_num SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 int1 string1 +PREHOOK: query: select a,b,c,d from table5 +PREHOOK: type: QUERY +PREHOOK: Input: default@table5 +#### A masked pattern was here #### +POSTHOOK: query: select a,b,c,d from table5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table5 +#### A masked pattern was here #### +a b c d +-128 new -128 smaller than Scotland +-2147483648 original NULL NULL +-57923222 new -57923222 astonishing +-999992 original NULL NULL +0 new 0 hangar paralysed companion +127 new 127 bottom +17 new 17 feather weight +203332 original NULL NULL +2147483647 original NULL NULL +32768 new 32768 major glaciation +400 new 400 window +40000 new 40000 Delaware +82153733 new 82153733 Antarctic peninsula +NULL new NULL baffling +NULL original NULL NULL +PREHOOK: query: update table5 set c=99 +PREHOOK: type: QUERY +PREHOOK: Input: default@table5 +PREHOOK: Output: default@table5 +POSTHOOK: query: update table5 set c=99 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table5 +POSTHOOK: Output: default@table5 +row__id insert_num a b _c4 d +PREHOOK: query: select a,b,c,d from table5 +PREHOOK: type: QUERY +PREHOOK: Input: default@table5 +#### A masked pattern was here #### +POSTHOOK: query: select a,b,c,d from table5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table5 +#### A masked pattern was here #### +a b c d +-128 new 99 smaller than Scotland +-2147483648 original 
99 NULL +-57923222 new 99 astonishing +-999992 original 99 NULL +0 new 99 hangar paralysed companion +127 new 99 bottom +17 new 99 feather weight +203332 original 99 NULL +2147483647 original 99 NULL +32768 new 99 major glaciation +400 new 99 window +40000 new 99 Delaware +82153733 new 99 Antarctic peninsula +NULL new 99 baffling +NULL original 99 NULL +PREHOOK: query: alter table table5 compact 'major' +PREHOOK: type: ALTERTABLE_COMPACT +POSTHOOK: query: alter table table5 compact 'major' +POSTHOOK: type: ALTERTABLE_COMPACT +PREHOOK: query: select a,b,c,d from table5 +PREHOOK: type: QUERY +PREHOOK: Input: default@table5 +#### A masked pattern was here #### +POSTHOOK: query: select a,b,c,d from table5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table5 +#### A masked pattern was here #### +a b c d +-128 new 99 smaller than Scotland +-2147483648 original 99 NULL +-57923222 new 99 astonishing +-999992 original 99 NULL +0 new 99 hangar paralysed companion +127 new 99 bottom +17 new 99 feather weight +203332 original 99 NULL +2147483647 original 99 NULL +32768 new 99 major glaciation +400 new 99 window +40000 new 99 Delaware +82153733 new 99 Antarctic peninsula +NULL new 99 baffling +NULL original 99 NULL +PREHOOK: query: DROP TABLE table5 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table5 +PREHOOK: Output: default@table5 +POSTHOOK: query: DROP TABLE table5 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table5 +POSTHOOK: Output: default@table5 +PREHOOK: query: CREATE TABLE table6(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table6 +POSTHOOK: query: CREATE TABLE table6(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table6 +PREHOOK: query: insert into table table6 SELECT insert_num, int1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table6 +POSTHOOK: query: insert into table table6 SELECT insert_num, int1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table6 +POSTHOOK: Lineage: table6.a SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table6.b SIMPLE [] +POSTHOOK: Lineage: table6.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 +PREHOOK: query: alter table table6 add columns(c int, d string) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@table6 +PREHOOK: Output: default@table6 +POSTHOOK: query: alter table table6 add columns(c int, d string) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@table6 +POSTHOOK: Output: default@table6 +PREHOOK: query: insert into table table6 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num <= 110 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@table6 +POSTHOOK: query: insert into table table6 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num <= 110 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: 
Output: default@table6 +POSTHOOK: Lineage: table6.a SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table6.b SIMPLE [] +POSTHOOK: Lineage: table6.c SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table6.d SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:string1, type:string, comment:null), ] +POSTHOOK: Lineage: table6.insert_num SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 int1 string1 +PREHOOK: query: insert into table table6 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@table6 +POSTHOOK: query: insert into table table6 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@table6 +POSTHOOK: Lineage: table6.a SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table6.b SIMPLE [] +POSTHOOK: Lineage: table6.c SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table6.d SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:string1, type:string, comment:null), ] +POSTHOOK: Lineage: table6.insert_num SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 int1 string1 +PREHOOK: query: select a,b,c,d from table6 +PREHOOK: type: QUERY +PREHOOK: Input: default@table6 +#### A masked pattern was here #### +POSTHOOK: query: select a,b,c,d from table6 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table6 +#### A masked pattern was here #### +a b c d +-128 new -128 smaller than Scotland +-2147483648 original NULL NULL +-57923222 new -57923222 astonishing +-999992 original NULL NULL +0 new 0 hangar paralysed companion +127 new 127 bottom +17 new 17 feather weight +203332 original NULL NULL +2147483647 original NULL NULL +32768 new 32768 major glaciation +400 new 400 window +40000 new 40000 Delaware +82153733 new 82153733 Antarctic peninsula +NULL new NULL baffling +NULL original NULL NULL +PREHOOK: query: delete from table6 where insert_num = 102 or insert_num = 104 or insert_num = 106 +PREHOOK: type: QUERY +PREHOOK: Input: default@table6 +PREHOOK: Output: default@table6 +POSTHOOK: query: delete from table6 where insert_num = 102 or insert_num = 104 or insert_num = 106 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table6 +POSTHOOK: Output: default@table6 +row__id +PREHOOK: query: select a,b,c,d from table6 +PREHOOK: type: QUERY +PREHOOK: Input: default@table6 +#### A masked pattern was here #### +POSTHOOK: query: select a,b,c,d from table6 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table6 +#### A masked pattern was here #### +a b c d +-128 new -128 smaller than Scotland +-2147483648 original NULL NULL +-57923222 new -57923222 astonishing +-999992 original NULL NULL +127 new 127 bottom +17 new 17 feather weight +32768 new 32768 major glaciation +400 new 400 window +40000 new 40000 Delaware +82153733 new 82153733 Antarctic peninsula +NULL new NULL baffling +NULL original NULL NULL +PREHOOK: query: 
alter table table6 compact 'major' +PREHOOK: type: ALTERTABLE_COMPACT +POSTHOOK: query: alter table table6 compact 'major' +POSTHOOK: type: ALTERTABLE_COMPACT +PREHOOK: query: select a,b,c,d from table6 +PREHOOK: type: QUERY +PREHOOK: Input: default@table6 +#### A masked pattern was here #### +POSTHOOK: query: select a,b,c,d from table6 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table6 +#### A masked pattern was here #### +a b c d +-128 new -128 smaller than Scotland +-2147483648 original NULL NULL +-57923222 new -57923222 astonishing +-999992 original NULL NULL +127 new 127 bottom +17 new 17 feather weight +32768 new 32768 major glaciation +400 new 400 window +40000 new 40000 Delaware +82153733 new 82153733 Antarctic peninsula +NULL new NULL baffling +NULL original NULL NULL +PREHOOK: query: DROP TABLE table6 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table6 +PREHOOK: Output: default@table6 +POSTHOOK: query: DROP TABLE table6 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table6 +POSTHOOK: Output: default@table6 +PREHOOK: query: CREATE TABLE table7(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table7 +POSTHOOK: query: CREATE TABLE table7(insert_num int, a INT, b STRING) clustered by (a) into 2 buckets STORED AS ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table7 +PREHOOK: query: insert into table table7 SELECT insert_num, int1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table7 +POSTHOOK: query: insert into table table7 SELECT insert_num, int1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table7 +POSTHOOK: Lineage: table7.a SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table7.b SIMPLE [] +POSTHOOK: Lineage: table7.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 +PREHOOK: query: alter table table7 add columns(c int, d string) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@table7 +PREHOOK: Output: default@table7 +POSTHOOK: query: alter table table7 add columns(c int, d string) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@table7 +POSTHOOK: Output: default@table7 +PREHOOK: query: insert into table table7 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num <= 110 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@table7 +POSTHOOK: query: insert into table table7 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num <= 110 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@table7 +POSTHOOK: Lineage: table7.a SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table7.b SIMPLE [] +POSTHOOK: Lineage: table7.c SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table7.d SIMPLE 
[(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:string1, type:string, comment:null), ] +POSTHOOK: Lineage: table7.insert_num SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 int1 string1 +PREHOOK: query: insert into table table7 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@table7 +POSTHOOK: query: insert into table table7 SELECT insert_num, int1, 'new', int1, string1 FROM schema_evolution_data_2 WHERE insert_num > 110 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@table7 +POSTHOOK: Lineage: table7.a SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table7.b SIMPLE [] +POSTHOOK: Lineage: table7.c SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table7.d SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:string1, type:string, comment:null), ] +POSTHOOK: Lineage: table7.insert_num SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 int1 string1 +PREHOOK: query: select a,b,c,d from table7 +PREHOOK: type: QUERY +PREHOOK: Input: default@table7 +#### A masked pattern was here #### +POSTHOOK: query: select a,b,c,d from table7 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table7 +#### A masked pattern was here #### +a b c d +-128 new -128 smaller than Scotland +-2147483648 original NULL NULL +-57923222 new -57923222 astonishing +-999992 original NULL NULL +0 new 0 hangar paralysed companion +127 new 127 bottom +17 new 17 feather weight +203332 original NULL NULL +2147483647 original NULL NULL +32768 new 32768 major glaciation +400 new 400 window +40000 new 40000 Delaware +82153733 new 82153733 Antarctic peninsula +NULL new NULL baffling +NULL original NULL NULL +PREHOOK: query: delete from table7 where insert_num = 107 or insert_num >= 110 +PREHOOK: type: QUERY +PREHOOK: Input: default@table7 +PREHOOK: Output: default@table7 +POSTHOOK: query: delete from table7 where insert_num = 107 or insert_num >= 110 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table7 +POSTHOOK: Output: default@table7 +row__id +PREHOOK: query: select a,b,c,d from table7 +PREHOOK: type: QUERY +PREHOOK: Input: default@table7 +#### A masked pattern was here #### +POSTHOOK: query: select a,b,c,d from table7 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table7 +#### A masked pattern was here #### +a b c d +-2147483648 original NULL NULL +-999992 original NULL NULL +0 new 0 hangar paralysed companion +17 new 17 feather weight +203332 original NULL NULL +2147483647 original NULL NULL +NULL new NULL baffling +NULL original NULL NULL +PREHOOK: query: alter table table7 compact 'major' +PREHOOK: type: ALTERTABLE_COMPACT +POSTHOOK: query: alter table table7 compact 'major' +POSTHOOK: type: ALTERTABLE_COMPACT +PREHOOK: query: select a,b,c,d from table7 +PREHOOK: type: QUERY +PREHOOK: Input: default@table7 +#### A masked pattern was here #### +POSTHOOK: query: select a,b,c,d from table7 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table7 +#### A masked pattern was here #### +a b c d +-2147483648 original NULL NULL +-999992 original NULL NULL +0 new 0 hangar paralysed 
companion +17 new 17 feather weight +203332 original NULL NULL +2147483647 original NULL NULL +NULL new NULL baffling +NULL original NULL NULL +PREHOOK: query: DROP TABLE table7 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table7 +PREHOOK: Output: default@table7 +POSTHOOK: query: DROP TABLE table7 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table7 +POSTHOOK: Output: default@table7 diff --git ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_complex_llap_io.q.out ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_complex_llap_io.q.out new file mode 100644 index 0000000..60c143e --- /dev/null +++ ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_complex_llap_io.q.out @@ -0,0 +1,687 @@ +PREHOOK: query: CREATE TABLE part_change_various_various_struct1(insert_num int, s1 STRUCT, b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_various_various_struct1 +POSTHOOK: query: CREATE TABLE part_change_various_various_struct1(insert_num int, s1 STRUCT, b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_various_various_struct1 +PREHOOK: query: CREATE TABLE complex_struct1_a_txt(insert_num int, s1 STRUCT, b STRING) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@complex_struct1_a_txt +POSTHOOK: query: CREATE TABLE complex_struct1_a_txt(insert_num int, s1 STRUCT, b STRING) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@complex_struct1_a_txt +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct1_a.txt' overwrite into table complex_struct1_a_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@complex_struct1_a_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct1_a.txt' overwrite into table complex_struct1_a_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@complex_struct1_a_txt +PREHOOK: query: insert into table part_change_various_various_struct1 partition(part=1) select * from complex_struct1_a_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@complex_struct1_a_txt +PREHOOK: Output: default@part_change_various_various_struct1@part=1 +POSTHOOK: query: insert into table part_change_various_various_struct1 partition(part=1) select * from complex_struct1_a_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@complex_struct1_a_txt +POSTHOOK: Output: default@part_change_various_various_struct1@part=1 +POSTHOOK: Lineage: part_change_various_various_struct1 PARTITION(part=1).b SIMPLE [(complex_struct1_a_txt)complex_struct1_a_txt.FieldSchema(name:b, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_struct1 PARTITION(part=1).insert_num SIMPLE [(complex_struct1_a_txt)complex_struct1_a_txt.FieldSchema(name:insert_num, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_struct1 PARTITION(part=1).s1 SIMPLE [(complex_struct1_a_txt)complex_struct1_a_txt.FieldSchema(name:s1, type:struct, comment:null), ] 
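The second new golden file moves from primitives to complex types: a partitioned table whose STRUCT column is evolved between partition loads (the STRUCT field lists appear truncated to bare STRUCT in this view of the patch; the row output shows thirteen fields c1..c13). A minimal sketch of that flow, with illustrative two-field struct types standing in for the full ones:

-- Stage delimited text into a struct column, copy it into partition 1,
-- then evolve the struct's field types before loading later partitions;
-- partition 1 is converted to the new field types at read time.
CREATE TABLE struct_stage (insert_num INT, s1 STRUCT<c1:BOOLEAN, c2:TINYINT>, b STRING)
ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
COLLECTION ITEMS TERMINATED BY ','
MAP KEYS TERMINATED BY ':'
STORED AS TEXTFILE;

CREATE TABLE struct_part (insert_num INT, s1 STRUCT<c1:BOOLEAN, c2:TINYINT>, b STRING)
PARTITIONED BY (part INT);

INSERT INTO struct_part PARTITION (part = 1) SELECT * FROM struct_stage;

-- Change the struct's field types before writing partition 2.
ALTER TABLE struct_part REPLACE COLUMNS
(insert_num INT, s1 STRUCT<c1:STRING, c2:STRING>, b STRING);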
+complex_struct1_a_txt.insert_num complex_struct1_a_txt.s1 complex_struct1_a_txt.b
+PREHOOK: query: select insert_num,part,s1,b from part_change_various_various_struct1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@part_change_various_various_struct1
+PREHOOK: Input: default@part_change_various_various_struct1@part=1
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,part,s1,b from part_change_various_various_struct1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@part_change_various_various_struct1
+POSTHOOK: Input: default@part_change_various_various_struct1@part=1
+#### A masked pattern was here ####
+insert_num part s1 b
+1 1 {"c1":true,"c2":null,"c3":null,"c4":3244222,"c5":-99999999999,"c6":-29.0764,"c7":4.70614135E8,"c8":470614135,"c9":"dynamic reptile ","c10":"dynamic reptile ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":n)گ} original
+2 1 {"c1":null,"c2":100,"c3":null,"c4":14,"c5":-23866739993,"c6":-3651.672,"c7":46114.284799488,"c8":46114.284799488,"c9":" baffling ","c10":" baffling ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":n)گ} original
+3 1 {"c1":false,"c2":72,"c3":null,"c4":-93222,"c5":30,"c6":-66475.56,"c7":-66475.561431,"c8":0.561431,"c9":"1 ","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":n)گ} original
+4 1 {"c1":null,"c2":-90,"c3":null,"c4":3289094,"c5":46114,"c6":9250341.0,"c7":9250340.75,"c8":9250340.75,"c9":"junkyard ","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":n)گ} original
+PREHOOK: query: alter table part_change_various_various_struct1 replace columns (insert_num int, s1 STRUCT, b STRING)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@part_change_various_various_struct1
+PREHOOK: Output: default@part_change_various_various_struct1
+POSTHOOK: query: alter table part_change_various_various_struct1 replace columns (insert_num int, s1 STRUCT, b STRING)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@part_change_various_various_struct1
+POSTHOOK: Output: default@part_change_various_various_struct1
+PREHOOK: query: CREATE TABLE complex_struct1_b_txt(insert_num int, s1 STRUCT, b STRING)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@complex_struct1_b_txt
+POSTHOOK: query: CREATE TABLE complex_struct1_b_txt(insert_num int, s1 STRUCT, b STRING)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@complex_struct1_b_txt
+PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct1_b.txt' overwrite into table complex_struct1_b_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@complex_struct1_b_txt
+POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct1_b.txt' overwrite into table complex_struct1_b_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@complex_struct1_b_txt
+PREHOOK: query: insert into table part_change_various_various_struct1 partition(part=2) select * from complex_struct1_b_txt
+PREHOOK: type: QUERY
+PREHOOK: Input: default@complex_struct1_b_txt
+PREHOOK: Output: default@part_change_various_various_struct1@part=2
+POSTHOOK: query: insert into table part_change_various_various_struct1 partition(part=2) select * from complex_struct1_b_txt
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@complex_struct1_b_txt
+POSTHOOK: Output: default@part_change_various_various_struct1@part=2
+POSTHOOK: Lineage: part_change_various_various_struct1 PARTITION(part=2).b SIMPLE [(complex_struct1_b_txt)complex_struct1_b_txt.FieldSchema(name:b, type:string, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_struct1 PARTITION(part=2).insert_num SIMPLE [(complex_struct1_b_txt)complex_struct1_b_txt.FieldSchema(name:insert_num, type:int, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_struct1 PARTITION(part=2).s1 SIMPLE [(complex_struct1_b_txt)complex_struct1_b_txt.FieldSchema(name:s1, type:struct, comment:null), ]
+complex_struct1_b_txt.insert_num complex_struct1_b_txt.s1 complex_struct1_b_txt.b
+PREHOOK: query: CREATE TABLE complex_struct1_c_txt(insert_num int, s1 STRUCT, b STRING)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@complex_struct1_c_txt
+POSTHOOK: query: CREATE TABLE complex_struct1_c_txt(insert_num int, s1 STRUCT, b STRING)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@complex_struct1_c_txt
+PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct1_c.txt' overwrite into table complex_struct1_c_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@complex_struct1_c_txt
+POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct1_c.txt' overwrite into table complex_struct1_c_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@complex_struct1_c_txt
+PREHOOK: query: insert into table part_change_various_various_struct1 partition(part=1) select * from complex_struct1_c_txt
+PREHOOK: type: QUERY
+PREHOOK: Input: default@complex_struct1_c_txt
+PREHOOK: Output: default@part_change_various_various_struct1@part=1
+POSTHOOK: query: insert into table part_change_various_various_struct1 partition(part=1) select * from complex_struct1_c_txt
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@complex_struct1_c_txt
+POSTHOOK: Output: default@part_change_various_various_struct1@part=1
+POSTHOOK: Lineage: part_change_various_various_struct1 PARTITION(part=1).b SIMPLE [(complex_struct1_c_txt)complex_struct1_c_txt.FieldSchema(name:b, type:string, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_struct1 PARTITION(part=1).insert_num SIMPLE [(complex_struct1_c_txt)complex_struct1_c_txt.FieldSchema(name:insert_num, type:int, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_struct1 PARTITION(part=1).s1 SIMPLE [(complex_struct1_c_txt)complex_struct1_c_txt.FieldSchema(name:s1, type:struct, comment:null), ]
+complex_struct1_c_txt.insert_num complex_struct1_c_txt.s1 complex_struct1_c_txt.b
+PREHOOK: query: explain vectorization detail
+select insert_num,part,s1,b from part_change_various_various_struct1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization detail
+select insert_num,part,s1,b from part_change_various_various_struct1
+POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+ enabled: false
+ enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: part_change_various_various_struct1
+ Statistics: Num rows: 6 Data size: 21030 Basic stats: COMPLETE Column stats: PARTIAL
+ Select Operator
+ expressions: insert_num (type: int), part (type: int), s1 (type: struct), b (type: string)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 6 Data size: 16320 Basic stats: COMPLETE Column stats: PARTIAL
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 6 Data size: 16320 Basic stats: COMPLETE Column stats: PARTIAL
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Execution mode: llap
+ LLAP IO: all inputs
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select insert_num,part,s1,b from part_change_various_various_struct1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@part_change_various_various_struct1
+PREHOOK: Input: default@part_change_various_various_struct1@part=1
+PREHOOK: Input: default@part_change_various_various_struct1@part=2
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,part,s1,b from part_change_various_various_struct1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@part_change_various_various_struct1
+POSTHOOK: Input: default@part_change_various_various_struct1@part=1
+POSTHOOK: Input: default@part_change_various_various_struct1@part=2
+#### A masked pattern was here ####
+insert_num part s1 b
+1 1 {"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"} original
+2 1 {"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":" baffling","c10":" baffling ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":"6e 29 da af"} original
+3 1 {"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"} original
+4 1 {"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"} original
+5 2 {"c1":"true","c2":"400","c3":"44388","c4":"-100","c5":"953967041.","c6":"62.079153","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":"timestamp","c12":"date","c13":"binary"} new
+6 1 {"c1":"false","c2":"-67","c3":"833","c4":"63993","c5":"1255178165.77663","c6":"905070.974","c7":"-4314.7918","c8":"-1240033819","c9":"trial","c10":"trial","c11":"2016-03-0703:02:22.0","c12":"2016-03-07","c13":"binary"} new
+PREHOOK: query: drop table part_change_various_various_struct1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@part_change_various_various_struct1
+PREHOOK: Output: default@part_change_various_various_struct1
+POSTHOOK: query: drop table part_change_various_various_struct1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@part_change_various_various_struct1
+POSTHOOK: Output: default@part_change_various_various_struct1
+PREHOOK: query: CREATE TABLE part_add_various_various_struct2(insert_num int, b STRING) PARTITIONED BY(part INT)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@part_add_various_various_struct2
+POSTHOOK: query: CREATE TABLE part_add_various_various_struct2(insert_num int, b STRING) PARTITIONED BY(part INT)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@part_add_various_various_struct2
+PREHOOK: query: insert into table part_add_various_various_struct2 partition(part=1)
+ values(1, 'original'),
+ (2, 'original')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@part_add_various_various_struct2@part=1
+POSTHOOK: query: insert into table part_add_various_various_struct2 partition(part=1)
+ values(1, 'original'),
+ (2, 'original')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@part_add_various_various_struct2@part=1
+POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=1).b SCRIPT []
+POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=1).insert_num SCRIPT []
+col1 col2
+PREHOOK: query: select insert_num,part,b from part_add_various_various_struct2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@part_add_various_various_struct2
+PREHOOK: Input: default@part_add_various_various_struct2@part=1
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,part,b from part_add_various_various_struct2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@part_add_various_various_struct2
+POSTHOOK: Input: default@part_add_various_various_struct2@part=1
+#### A masked pattern was here ####
+insert_num part b
+1 1 original
+2 1 original
+PREHOOK: query: alter table part_add_various_various_struct2 ADD columns (s2 STRUCT)
+PREHOOK: type: ALTERTABLE_ADDCOLS
+PREHOOK: Input: default@part_add_various_various_struct2
+PREHOOK: Output: default@part_add_various_various_struct2
+POSTHOOK: query: alter table part_add_various_various_struct2 ADD columns (s2 STRUCT)
+POSTHOOK: type: ALTERTABLE_ADDCOLS
+POSTHOOK: Input: default@part_add_various_various_struct2
+POSTHOOK: Output: default@part_add_various_various_struct2
+PREHOOK: query: CREATE TABLE complex_struct2_a_txt(insert_num int, b STRING, s2 STRUCT)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@complex_struct2_a_txt
+POSTHOOK: query: CREATE TABLE complex_struct2_a_txt(insert_num int, b STRING, s2 STRUCT)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@complex_struct2_a_txt
+PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct2_a.txt' overwrite into table complex_struct2_a_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@complex_struct2_a_txt
+POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct2_a.txt' overwrite into table complex_struct2_a_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@complex_struct2_a_txt
+PREHOOK: query: insert into table part_add_various_various_struct2 partition(part=1) select * from complex_struct2_a_txt
+PREHOOK: type: QUERY
+PREHOOK: Input: default@complex_struct2_a_txt
+PREHOOK: Output: default@part_add_various_various_struct2@part=1
+POSTHOOK: query: insert into table part_add_various_various_struct2 partition(part=1) select * from complex_struct2_a_txt
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@complex_struct2_a_txt
+POSTHOOK: Output: default@part_add_various_various_struct2@part=1
+POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=1).b SIMPLE [(complex_struct2_a_txt)complex_struct2_a_txt.FieldSchema(name:b, type:string, comment:null), ]
+POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=1).insert_num SIMPLE [(complex_struct2_a_txt)complex_struct2_a_txt.FieldSchema(name:insert_num, type:int, comment:null), ]
+POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=1).s2 SIMPLE [(complex_struct2_a_txt)complex_struct2_a_txt.FieldSchema(name:s2, type:struct, comment:null), ]
+complex_struct2_a_txt.insert_num complex_struct2_a_txt.b complex_struct2_a_txt.s2
+PREHOOK: query: CREATE TABLE complex_struct2_b_txt(insert_num int, b STRING, s2 STRUCT)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@complex_struct2_b_txt
+POSTHOOK: query: CREATE TABLE complex_struct2_b_txt(insert_num int, b STRING, s2 STRUCT)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@complex_struct2_b_txt
+PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct2_b.txt' overwrite into table complex_struct2_b_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@complex_struct2_b_txt
+POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct2_b.txt' overwrite into table complex_struct2_b_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@complex_struct2_b_txt
+PREHOOK: query: insert into table part_add_various_various_struct2 partition(part=2) select * from complex_struct2_b_txt
+PREHOOK: type: QUERY
+PREHOOK: Input: default@complex_struct2_b_txt
+PREHOOK: Output: default@part_add_various_various_struct2@part=2
+POSTHOOK: query: insert into table part_add_various_various_struct2 partition(part=2) select * from complex_struct2_b_txt
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@complex_struct2_b_txt
+POSTHOOK: Output: default@part_add_various_various_struct2@part=2
+POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=2).b SIMPLE [(complex_struct2_b_txt)complex_struct2_b_txt.FieldSchema(name:b, type:string, comment:null), ]
+POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=2).insert_num SIMPLE [(complex_struct2_b_txt)complex_struct2_b_txt.FieldSchema(name:insert_num, type:int, comment:null), ]
+POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=2).s2 SIMPLE [(complex_struct2_b_txt)complex_struct2_b_txt.FieldSchema(name:s2, type:struct, comment:null), ]
+complex_struct2_b_txt.insert_num complex_struct2_b_txt.b complex_struct2_b_txt.s2
+PREHOOK: query: select insert_num,part,b,s2 from part_add_various_various_struct2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@part_add_various_various_struct2
+PREHOOK: Input: default@part_add_various_various_struct2@part=1
+PREHOOK: Input: default@part_add_various_various_struct2@part=2
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,part,b,s2 from part_add_various_various_struct2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@part_add_various_various_struct2
+POSTHOOK: Input: default@part_add_various_various_struct2@part=1
+POSTHOOK: Input: default@part_add_various_various_struct2@part=2
+#### A masked pattern was here ####
+insert_num part b s2
+1 1 original NULL
+2 1 original NULL
+3 1 new {"c1":true,"c2":null,"c3":null,"c4":3244222,"c5":-99999999999,"c6":-29.0764,"c7":4.70614135E8,"c8":470614135,"c9":"dynamic reptile ","c10":"dynamic reptile ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":n)گ}
+4 1 new {"c1":null,"c2":100,"c3":null,"c4":14,"c5":-23866739993,"c6":-3651.672,"c7":46114.284799488,"c8":46114.284799488,"c9":" baffling ","c10":" baffling ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":n)گ}
+5 2 new {"c1":false,"c2":72,"c3":null,"c4":-93222,"c5":30,"c6":-66475.56,"c7":-66475.561431,"c8":0.561431,"c9":"1 ","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":n)گ}
+6 2 new {"c1":null,"c2":-90,"c3":null,"c4":3289094,"c5":46114,"c6":9250341.0,"c7":9250340.75,"c8":9250340.75,"c9":"junkyard ","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":n)گ}
+PREHOOK: query: alter table part_add_various_various_struct2 REPLACE columns (insert_num int, b STRING, s2 STRUCT)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@part_add_various_various_struct2
+PREHOOK: Output: default@part_add_various_various_struct2
+POSTHOOK: query: alter table part_add_various_various_struct2 REPLACE columns (insert_num int, b STRING, s2 STRUCT)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@part_add_various_various_struct2
+POSTHOOK: Output: default@part_add_various_various_struct2
+PREHOOK: query: CREATE TABLE complex_struct2_c_txt(insert_num int, b STRING, s2 STRUCT)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@complex_struct2_c_txt
+POSTHOOK: query: CREATE TABLE complex_struct2_c_txt(insert_num int, b STRING, s2 STRUCT)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@complex_struct2_c_txt
+PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct2_c.txt' overwrite into table complex_struct2_c_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@complex_struct2_c_txt
+POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct2_c.txt' overwrite into table complex_struct2_c_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@complex_struct2_c_txt
+PREHOOK: query: insert into table part_add_various_various_struct2 partition(part=2) select * from complex_struct2_c_txt
+PREHOOK: type: QUERY
+PREHOOK: Input: default@complex_struct2_c_txt
+PREHOOK: Output: default@part_add_various_various_struct2@part=2
+POSTHOOK: query: insert into table part_add_various_various_struct2 partition(part=2) select * from complex_struct2_c_txt
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@complex_struct2_c_txt
+POSTHOOK: Output: default@part_add_various_various_struct2@part=2
+POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=2).b SIMPLE [(complex_struct2_c_txt)complex_struct2_c_txt.FieldSchema(name:b, type:string, comment:null), ]
+POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=2).insert_num SIMPLE [(complex_struct2_c_txt)complex_struct2_c_txt.FieldSchema(name:insert_num, type:int, comment:null), ]
+POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=2).s2 SIMPLE [(complex_struct2_c_txt)complex_struct2_c_txt.FieldSchema(name:s2, type:struct, comment:null), ]
+complex_struct2_c_txt.insert_num complex_struct2_c_txt.b complex_struct2_c_txt.s2
+PREHOOK: query: CREATE TABLE complex_struct2_d_txt(insert_num int, b STRING, s2 STRUCT)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@complex_struct2_d_txt
+POSTHOOK: query: CREATE TABLE complex_struct2_d_txt(insert_num int, b STRING, s2 STRUCT)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@complex_struct2_d_txt
+PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct2_d.txt' overwrite into table complex_struct2_d_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@complex_struct2_d_txt
+POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct2_d.txt' overwrite into table complex_struct2_d_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@complex_struct2_d_txt
+PREHOOK: query: insert into table part_add_various_various_struct2 partition(part=1) select * from complex_struct2_d_txt
+PREHOOK: type: QUERY
+PREHOOK: Input: default@complex_struct2_d_txt
+PREHOOK: Output: default@part_add_various_various_struct2@part=1
+POSTHOOK: query: insert into table part_add_various_various_struct2 partition(part=1) select * from complex_struct2_d_txt
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@complex_struct2_d_txt
+POSTHOOK: Output: default@part_add_various_various_struct2@part=1
+POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=1).b SIMPLE [(complex_struct2_d_txt)complex_struct2_d_txt.FieldSchema(name:b, type:string, comment:null), ]
+POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=1).insert_num SIMPLE [(complex_struct2_d_txt)complex_struct2_d_txt.FieldSchema(name:insert_num, type:int, comment:null), ]
+POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=1).s2 SIMPLE [(complex_struct2_d_txt)complex_struct2_d_txt.FieldSchema(name:s2, type:struct, comment:null), ]
+complex_struct2_d_txt.insert_num complex_struct2_d_txt.b complex_struct2_d_txt.s2
+PREHOOK: query: explain vectorization detail
+select insert_num,part,b,s2 from part_add_various_various_struct2
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization detail
+select insert_num,part,b,s2 from part_add_various_various_struct2
+POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+ enabled: false
+ enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: part_add_various_various_struct2
+ Statistics: Num rows: 8 Data size: 800 Basic stats: COMPLETE Column stats: PARTIAL
+ Select Operator
+ expressions: insert_num (type: int), part (type: int), b (type: string), s2 (type: struct)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 8 Data size: 800 Basic stats: COMPLETE Column stats: PARTIAL
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 800 Basic stats: COMPLETE Column stats: PARTIAL
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Execution mode: llap
+ LLAP IO: all inputs
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select insert_num,part,b,s2 from part_add_various_various_struct2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@part_add_various_various_struct2
+PREHOOK: Input: default@part_add_various_various_struct2@part=1
+PREHOOK: Input: default@part_add_various_various_struct2@part=2
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,part,b,s2 from part_add_various_various_struct2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@part_add_various_various_struct2
+POSTHOOK: Input: default@part_add_various_various_struct2@part=1
+POSTHOOK: Input: default@part_add_various_various_struct2@part=2
+#### A masked pattern was here ####
+insert_num part b s2
+1 1 original NULL
+2 1 original NULL
+3 1 new {"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"}
+4 1 new {"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":" baffling","c10":" baffling ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":"6e 29 da af"}
+5 2 new {"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"}
+6 2 new {"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"}
+7 2 new {"c1":"true","c2":"400","c3":"44388","c4":"-100","c5":"953967041.","c6":"62.079153","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":"timestamp","c12":"date","c13":"binary"}
+8 1 new {"c1":"false","c2":"-67","c3":"833","c4":"63993","c5":"1255178165.77663","c6":"905070.974","c7":"-4314.7918","c8":"-1240033819","c9":"trial","c10":"trial","c11":"2016-03-0703:02:22.0","c12":"2016-03-07","c13":"binary"}
+PREHOOK: query: drop table part_add_various_various_struct2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@part_add_various_various_struct2
+PREHOOK: Output: default@part_add_various_various_struct2
+POSTHOOK: query: drop table part_add_various_various_struct2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@part_add_various_various_struct2
+POSTHOOK: Output: default@part_add_various_various_struct2
+PREHOOK: query: CREATE TABLE part_add_to_various_various_struct4(insert_num int, b STRING, s3 STRUCT) PARTITIONED BY(part INT)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@part_add_to_various_various_struct4
+POSTHOOK: query: CREATE TABLE part_add_to_various_various_struct4(insert_num int, b STRING, s3 STRUCT) PARTITIONED BY(part INT)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@part_add_to_various_various_struct4
+PREHOOK: query: CREATE TABLE complex_struct4_a_txt(insert_num int, b STRING, s3 STRUCT)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@complex_struct4_a_txt
+POSTHOOK: query: CREATE TABLE complex_struct4_a_txt(insert_num int, b STRING, s3 STRUCT)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@complex_struct4_a_txt
+PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct4_a.txt' overwrite into table complex_struct4_a_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@complex_struct4_a_txt
+POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct4_a.txt' overwrite into table complex_struct4_a_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@complex_struct4_a_txt
+PREHOOK: query: insert into table part_add_to_various_various_struct4 partition(part=1) select * from complex_struct4_a_txt
+PREHOOK: type: QUERY
+PREHOOK: Input: default@complex_struct4_a_txt
+PREHOOK: Output: default@part_add_to_various_various_struct4@part=1
+POSTHOOK: query: insert into table part_add_to_various_various_struct4 partition(part=1) select * from complex_struct4_a_txt
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@complex_struct4_a_txt
+POSTHOOK: Output: default@part_add_to_various_various_struct4@part=1
+POSTHOOK: Lineage: part_add_to_various_various_struct4 PARTITION(part=1).b SIMPLE [(complex_struct4_a_txt)complex_struct4_a_txt.FieldSchema(name:b, type:string, comment:null), ]
+POSTHOOK: Lineage: part_add_to_various_various_struct4 PARTITION(part=1).insert_num SIMPLE [(complex_struct4_a_txt)complex_struct4_a_txt.FieldSchema(name:insert_num, type:int, comment:null), ]
+POSTHOOK: Lineage: part_add_to_various_various_struct4 PARTITION(part=1).s3 SIMPLE [(complex_struct4_a_txt)complex_struct4_a_txt.FieldSchema(name:s3, type:struct, comment:null), ]
+complex_struct4_a_txt.insert_num complex_struct4_a_txt.b complex_struct4_a_txt.s3
+PREHOOK: query: select insert_num,part,b,s3 from part_add_to_various_various_struct4
+PREHOOK: type: QUERY
+PREHOOK: Input: default@part_add_to_various_various_struct4
+PREHOOK: Input: default@part_add_to_various_various_struct4@part=1
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,part,b,s3 from part_add_to_various_various_struct4
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@part_add_to_various_various_struct4
+POSTHOOK: Input: default@part_add_to_various_various_struct4@part=1
+#### A masked pattern was here ####
+insert_num part b s3
+1 1 original {"c1":true,"c2":null,"c3":null,"c4":3244222,"c5":-99999999999}
+2 1 original {"c1":null,"c2":100,"c3":null,"c4":14,"c5":-23866739993}
+PREHOOK: query: alter table part_add_to_various_various_struct4 replace columns (insert_num int, b STRING, s3 STRUCT)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@part_add_to_various_various_struct4
+PREHOOK: Output: default@part_add_to_various_various_struct4
+POSTHOOK: query: alter table part_add_to_various_various_struct4 replace columns (insert_num int, b STRING, s3 STRUCT)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@part_add_to_various_various_struct4
+POSTHOOK: Output: default@part_add_to_various_various_struct4
+PREHOOK: query: CREATE TABLE complex_struct4_b_txt(insert_num int, b STRING, s3 STRUCT)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@complex_struct4_b_txt
+POSTHOOK: query: CREATE TABLE complex_struct4_b_txt(insert_num int, b STRING, s3 STRUCT)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@complex_struct4_b_txt
+PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct4_b.txt' overwrite into table complex_struct4_b_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@complex_struct4_b_txt
+POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct4_b.txt' overwrite into table complex_struct4_b_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@complex_struct4_b_txt
+PREHOOK: query: insert into table part_add_to_various_various_struct4 partition(part=2) select * from complex_struct4_b_txt
+PREHOOK: type: QUERY
+PREHOOK: Input: default@complex_struct4_b_txt
+PREHOOK: Output: default@part_add_to_various_various_struct4@part=2
+POSTHOOK: query: insert into table part_add_to_various_various_struct4 partition(part=2) select * from complex_struct4_b_txt
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@complex_struct4_b_txt
+POSTHOOK: Output: default@part_add_to_various_various_struct4@part=2
+POSTHOOK: Lineage: part_add_to_various_various_struct4 PARTITION(part=2).b SIMPLE [(complex_struct4_b_txt)complex_struct4_b_txt.FieldSchema(name:b, type:string, comment:null), ]
+POSTHOOK: Lineage: part_add_to_various_various_struct4 PARTITION(part=2).insert_num SIMPLE [(complex_struct4_b_txt)complex_struct4_b_txt.FieldSchema(name:insert_num, type:int, comment:null), ]
+POSTHOOK: Lineage: part_add_to_various_various_struct4 PARTITION(part=2).s3 SIMPLE [(complex_struct4_b_txt)complex_struct4_b_txt.FieldSchema(name:s3, type:struct, comment:null), ]
+complex_struct4_b_txt.insert_num complex_struct4_b_txt.b complex_struct4_b_txt.s3
+PREHOOK: query: CREATE TABLE complex_struct4_c_txt(insert_num int, b STRING, s3 STRUCT)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@complex_struct4_c_txt
+POSTHOOK: query: CREATE TABLE complex_struct4_c_txt(insert_num int, b STRING, s3 STRUCT)
+row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@complex_struct4_c_txt
+PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct4_c.txt' overwrite into table complex_struct4_c_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@complex_struct4_c_txt
+POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct4_c.txt' overwrite into table complex_struct4_c_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@complex_struct4_c_txt
+PREHOOK: query: insert into table part_add_to_various_various_struct4 partition(part=1) select * from complex_struct4_c_txt
+PREHOOK: type: QUERY
+PREHOOK: Input: default@complex_struct4_c_txt
+PREHOOK: Output: default@part_add_to_various_various_struct4@part=1
+POSTHOOK: query: insert into table part_add_to_various_various_struct4 partition(part=1) select * from complex_struct4_c_txt
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@complex_struct4_c_txt
+POSTHOOK: Output: default@part_add_to_various_various_struct4@part=1
+POSTHOOK: Lineage: part_add_to_various_various_struct4 PARTITION(part=1).b SIMPLE [(complex_struct4_c_txt)complex_struct4_c_txt.FieldSchema(name:b, type:string, comment:null), ]
+POSTHOOK: Lineage: part_add_to_various_various_struct4 PARTITION(part=1).insert_num SIMPLE [(complex_struct4_c_txt)complex_struct4_c_txt.FieldSchema(name:insert_num, type:int, comment:null), ]
+POSTHOOK: Lineage: part_add_to_various_various_struct4 PARTITION(part=1).s3 SIMPLE [(complex_struct4_c_txt)complex_struct4_c_txt.FieldSchema(name:s3, type:struct, comment:null), ]
+complex_struct4_c_txt.insert_num complex_struct4_c_txt.b complex_struct4_c_txt.s3
+PREHOOK: query: explain vectorization detail
+select insert_num,part,b,s3 from part_add_to_various_various_struct4
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization detail
+select insert_num,part,b,s3 from part_add_to_various_various_struct4
+POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+ enabled: false
+ enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: part_add_to_various_various_struct4
+ Statistics: Num rows: 4 Data size: 4892 Basic stats: COMPLETE Column stats: PARTIAL
+ Select Operator
+ expressions: insert_num (type: int), part (type: int), b (type: string), s3 (type: struct)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 4 Data size: 3736 Basic stats: COMPLETE Column stats: PARTIAL
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 4 Data size: 3736 Basic stats: COMPLETE Column stats: PARTIAL
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Execution mode: llap
+ LLAP IO: all inputs
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select insert_num,part,b,s3 from part_add_to_various_various_struct4
+PREHOOK: type: QUERY
+PREHOOK: Input: default@part_add_to_various_various_struct4
+PREHOOK: Input: default@part_add_to_various_various_struct4@part=1
+PREHOOK: Input: default@part_add_to_various_various_struct4@part=2
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,part,b,s3 from part_add_to_various_various_struct4
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@part_add_to_various_various_struct4
+POSTHOOK: Input: default@part_add_to_various_various_struct4@part=1
+POSTHOOK: Input: default@part_add_to_various_various_struct4@part=2
+#### A masked pattern was here ####
+insert_num part b s3
+1 1 original {"c1":true,"c2":null,"c3":null,"c4":3244222,"c5":-99999999999,"c6":null,"c7":null,"c8":null,"c9":null,"c10":null,"c11":null,"c12":null,"c13":null}
+2 1 original {"c1":null,"c2":100,"c3":null,"c4":14,"c5":-23866739993,"c6":null,"c7":null,"c8":null,"c9":null,"c10":null,"c11":null,"c12":null,"c13":null}
+3 2 new {"c1":true,"c2":null,"c3":null,"c4":-100,"c5":953967041,"c6":62.07915,"c7":718.78,"c8":1,"c9":"verdict ","c10":"verdict","c11":null,"c12":null,"c13":n)گ}
+4 1 new {"c1":false,"c2":-67,"c3":833,"c4":63993,"c5":1255178165,"c6":905071.0,"c7":-4314.7918,"c8":-1240033819,"c9":"trial ","c10":"trial","c11":null,"c12":"2016-03-07","c13":n)گ}
+PREHOOK: query: drop table part_add_to_various_various_struct4
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@part_add_to_various_various_struct4
+PREHOOK: Output: default@part_add_to_various_various_struct4
+POSTHOOK: query: drop table part_add_to_various_various_struct4
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@part_add_to_various_various_struct4
+POSTHOOK: Output: default@part_add_to_various_various_struct4
diff --git ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_primitive_llap_io.q.out ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_primitive_llap_io.q.out
new file mode 100644
index 0000000..de03004
--- /dev/null
+++ ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_primitive_llap_io.q.out
@@ -0,0 +1,1042 @@
+PREHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string)
+row format delimited fields terminated by '|' stored as textfile
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@schema_evolution_data
+POSTHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string)
+row format delimited fields terminated by '|' stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@schema_evolution_data
+PREHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@schema_evolution_data
+POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@schema_evolution_data
+PREHOOK: query: CREATE TABLE schema_evolution_data_2(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string)
+row format delimited fields terminated by '|' stored as textfile
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@schema_evolution_data_2
+POSTHOOK: query: CREATE TABLE schema_evolution_data_2(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string)
+row format delimited fields terminated by '|' stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@schema_evolution_data_2
+PREHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data_2.txt' overwrite into table schema_evolution_data_2
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@schema_evolution_data_2
+POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data_2.txt' overwrite into table schema_evolution_data_2
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@schema_evolution_data_2
+PREHOOK: query: CREATE TABLE part_change_various_various_boolean_to_bigint(insert_num int,
+ c1 TINYINT, c2 SMALLINT, c3 INT, c4 BIGINT, c5 FLOAT, c6 DOUBLE, c7 DECIMAL(38,18), c8 STRING, c9 TIMESTAMP,
+ c10 BOOLEAN, c11 SMALLINT, c12 INT, c13 BIGINT, c14 FLOAT, c15 DOUBLE, c16 DECIMAL(38,18), c17 STRING, c18 CHAR(25), c19 VARCHAR(25), c20 TIMESTAMP,
+ c21 BOOLEAN, c22 TINYINT, c23 INT, c24 BIGINT, c25 FLOAT, c26 DOUBLE, c27 DECIMAL(38,18), c28 STRING, c29 CHAR(25), c30 VARCHAR(25), c31 TIMESTAMP,
+ c32 BOOLEAN, c33 TINYINT, c34 SMALLINT, c35 BIGINT, c36 FLOAT, c37 DOUBLE, c38 DECIMAL(38,18), c39 STRING, c40 CHAR(25), c41 VARCHAR(25), c42 TIMESTAMP,
+ c43 BOOLEAN, c44 TINYINT, c45 SMALLINT, c46 INT, c47 FLOAT, c48 DOUBLE, c49 DECIMAL(38,18), c50 STRING, c51 CHAR(25), c52 VARCHAR(25), c53 TIMESTAMP,
+ b STRING) PARTITIONED BY(part INT)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@part_change_various_various_boolean_to_bigint
+POSTHOOK: query: CREATE TABLE part_change_various_various_boolean_to_bigint(insert_num int,
+ c1 TINYINT, c2 SMALLINT, c3 INT, c4 BIGINT, c5 FLOAT, c6 DOUBLE, c7 DECIMAL(38,18), c8 STRING, c9 TIMESTAMP,
+ c10 BOOLEAN, c11 SMALLINT, c12 INT, c13 BIGINT, c14 FLOAT, c15 DOUBLE, c16 DECIMAL(38,18), c17 STRING, c18 CHAR(25), c19 VARCHAR(25), c20 TIMESTAMP,
+ c21 BOOLEAN, c22 TINYINT, c23 INT, c24 BIGINT, c25 FLOAT, c26 DOUBLE, c27 DECIMAL(38,18), c28 STRING, c29 CHAR(25), c30 VARCHAR(25), c31 TIMESTAMP,
+ c32 BOOLEAN, c33 TINYINT, c34 SMALLINT, c35 BIGINT, c36 FLOAT, c37 DOUBLE, c38 DECIMAL(38,18), c39 STRING, c40 CHAR(25), c41 VARCHAR(25), c42 TIMESTAMP,
+ c43 BOOLEAN, c44 TINYINT, c45 SMALLINT, c46 INT, c47 FLOAT, c48 DOUBLE, c49 DECIMAL(38,18), c50 STRING, c51 CHAR(25), c52 VARCHAR(25), c53 TIMESTAMP,
+ b STRING) PARTITIONED BY(part INT)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@part_change_various_various_boolean_to_bigint
+PREHOOK: query: insert into table part_change_various_various_boolean_to_bigint partition(part=1) SELECT insert_num,
+ tinyint1, smallint1, int1, bigint1, float1, double1, decimal1, boolean_str, timestamp1,
+ boolean1, smallint1, int1, bigint1, float1, double1, decimal1, tinyint_str, tinyint_str, tinyint_str, timestamp1,
+ boolean1, tinyint1, int1, bigint1, float1, double1, decimal1, smallint_str, smallint_str, smallint_str, timestamp1,
+ boolean1, tinyint1, smallint1, bigint1, float1, double1, decimal1, int_str, int_str, int_str, timestamp1,
+ boolean1, tinyint1, smallint1, int1, float1, double1, decimal1, bigint_str, bigint_str, bigint_str, timestamp1,
+ 'original' FROM schema_evolution_data
+PREHOOK: type: QUERY
+PREHOOK: Input: default@schema_evolution_data
+PREHOOK: Output: default@part_change_various_various_boolean_to_bigint@part=1
+POSTHOOK: query: insert into table part_change_various_various_boolean_to_bigint partition(part=1) SELECT insert_num,
+ tinyint1, smallint1, int1, bigint1, float1, double1, decimal1, boolean_str, timestamp1,
+ boolean1, smallint1, int1, bigint1, float1, double1, decimal1, tinyint_str, tinyint_str, tinyint_str, timestamp1,
+ boolean1, tinyint1, int1, bigint1, float1, double1, decimal1, smallint_str, smallint_str, smallint_str, timestamp1,
+ boolean1, tinyint1, smallint1, bigint1, float1, double1, decimal1, int_str, int_str, int_str, timestamp1,
+ boolean1, tinyint1, smallint1, int1, float1, double1, decimal1, bigint_str, bigint_str, bigint_str, timestamp1,
+ 'original' FROM schema_evolution_data
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@schema_evolution_data
+POSTHOOK: Output: default@part_change_various_various_boolean_to_bigint@part=1
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).b SIMPLE []
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c16 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c17 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint_str, type:string, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c18 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint_str, type:string, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c19 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint_str, type:string, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c20 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c21 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c22 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c23 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c24 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c25 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c26 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c27 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c28 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint_str, type:string, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c29 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint_str, type:string, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c30 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint_str, type:string, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c31 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c32 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c33 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c34 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c35 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c36 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c37 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c38 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c39 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int_str, type:string, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c40 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int_str, type:string, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c41 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int_str, type:string, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c42 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c43 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c44 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c45 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c46 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c47 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c48 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ]
+POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c49 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c50 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c51 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c52 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c53 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 _col17 _col18 _col19 _col20 _col21 _col22 _col23 _col24 _col25 _col26 _col27 _col28 _col29 _col30 _col31 _col32 _col33 _col34 _col35 _col36 _col37 _col38 _col39 _col40 _col41 _col42 _col43 _col44 _col45 _col46 _col47 _col48 _col49 _col50 _col51 _col52 _col53 _col54 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_various_various_boolean_to_bigint +PREHOOK: Input: default@part_change_various_various_boolean_to_bigint@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint +POSTHOOK: Input: 
default@part_change_various_various_boolean_to_bigint@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 c21 c22 c23 c24 c25 c26 c27 c28 c29 c30 c31 c32 c33 c34 c35 c36 c37 c38 c39 c40 c41 c42 c43 c44 c45 c46 c47 c48 c49 c50 c51 c52 c53 b +101 1 -128 NULL -2147483648 NULL Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 true 6229-06-28 02:54:28.970117179 true NULL -2147483648 NULL Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 -128 -128 -128 6229-06-28 02:54:28.970117179 true -128 -2147483648 NULL Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 -32768 -32768 -32768 6229-06-28 02:54:28.970117179 true -128 NULL NULL Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 -2147483648 -2147483648 -2147483648 6229-06-28 02:54:28.970117179 true -128 NULL -2147483648 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 -9223372036854775808 -9223372036854775808 -9223372036854775808 6229-06-28 02:54:28.970117179 original +102 1 127 32767 2147483647 9223372036854775807 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 false 5966-07-09 03:30:50.597 false 32767 2147483647 9223372036854775807 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 127 127 127 5966-07-09 03:30:50.597 false 127 2147483647 9223372036854775807 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 32767 32767 32767 5966-07-09 03:30:50.597 false 127 32767 9223372036854775807 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 2147483647 2147483647 2147483647 5966-07-09 03:30:50.597 false 127 32767 2147483647 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 9223372036854775807 9223372036854775807 9223372036854775807 5966-07-09 03:30:50.597 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 23 834 203332 888888857923222 -100.35978 30.774 66475.561431000000000000 true 1978-08-02 06:34:14 true 834 203332 888888857923222 -100.35978 30.774 66475.561431000000000000 23 23 23 1978-08-02 06:34:14 true 23 203332 888888857923222 -100.35978 30.774 66475.561431000000000000 834 834 834 1978-08-02 06:34:14 true 23 834 888888857923222 -100.35978 30.774 66475.561431000000000000 203332 203332 203332 1978-08-02 06:34:14 true 23 834 203332 -100.35978 30.774 66475.561431000000000000 888888857923222 888888857923222 888888857923222 1978-08-02 06:34:14 original +105 1 -99 -28300 -999992 -222282153733 NULL 46114.28 9250340.750000000000000000 false 1991-01-06 16:20:39.72036854 false -28300 -999992 -222282153733 NULL 46114.28 9250340.750000000000000000 -99 -99 -99 1991-01-06 16:20:39.72036854 false -99 -999992 -222282153733 NULL 46114.28 9250340.750000000000000000 -28300 -28300 -28300 1991-01-06 16:20:39.72036854 false -99 -28300 -222282153733 NULL 46114.28 9250340.750000000000000000 -999992 -999992 -999992 1991-01-06 16:20:39.72036854 false -99 -28300 -999992 NULL 46114.28 9250340.750000000000000000 -222282153733 -222282153733 -222282153733 1991-01-06 16:20:39.72036854 original +PREHOOK: query: alter table part_change_various_various_boolean_to_bigint replace columns (insert_num int, + c1 BOOLEAN, c2 BOOLEAN, c3 BOOLEAN, c4 BOOLEAN, c5 BOOLEAN, c6 BOOLEAN, c7 BOOLEAN, c8
BOOLEAN, c9 BOOLEAN, + c10 TINYINT, c11 TINYINT, c12 TINYINT, c13 TINYINT, c14 TINYINT, c15 TINYINT, c16 TINYINT, c17 TINYINT, c18 TINYINT, c19 TINYINT, c20 TINYINT, + c21 SMALLINT, c22 SMALLINT, c23 SMALLINT, c24 SMALLINT, c25 SMALLINT, c26 SMALLINT, c27 SMALLINT, c28 SMALLINT, c29 SMALLINT, c30 SMALLINT, c31 SMALLINT, + c32 INT, c33 INT, c34 INT, c35 INT, c36 INT, c37 INT, c38 INT, c39 INT, c40 INT, c41 INT, c42 INT, + c43 BIGINT, c44 BIGINT, c45 BIGINT, c46 BIGINT, c47 BIGINT, c48 BIGINT, c49 BIGINT, c50 BIGINT, c51 BIGINT, c52 BIGINT, c53 BIGINT, + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_various_various_boolean_to_bigint +PREHOOK: Output: default@part_change_various_various_boolean_to_bigint +POSTHOOK: query: alter table part_change_various_various_boolean_to_bigint replace columns (insert_num int, + c1 BOOLEAN, c2 BOOLEAN, c3 BOOLEAN, c4 BOOLEAN, c5 BOOLEAN, c6 BOOLEAN, c7 BOOLEAN, c8 BOOLEAN, c9 BOOLEAN, + c10 TINYINT, c11 TINYINT, c12 TINYINT, c13 TINYINT, c14 TINYINT, c15 TINYINT, c16 TINYINT, c17 TINYINT, c18 TINYINT, c19 TINYINT, c20 TINYINT, + c21 SMALLINT, c22 SMALLINT, c23 SMALLINT, c24 SMALLINT, c25 SMALLINT, c26 SMALLINT, c27 SMALLINT, c28 SMALLINT, c29 SMALLINT, c30 SMALLINT, c31 SMALLINT, + c32 INT, c33 INT, c34 INT, c35 INT, c36 INT, c37 INT, c38 INT, c39 INT, c40 INT, c41 INT, c42 INT, + c43 BIGINT, c44 BIGINT, c45 BIGINT, c46 BIGINT, c47 BIGINT, c48 BIGINT, c49 BIGINT, c50 BIGINT, c51 BIGINT, c52 BIGINT, c53 BIGINT, + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint +POSTHOOK: Output: default@part_change_various_various_boolean_to_bigint +PREHOOK: query: insert into table part_change_various_various_boolean_to_bigint partition(part=1) SELECT insert_num, + boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, int1, int1, int1, int1, int1, int1, int1, + bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, + 'new' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_various_various_boolean_to_bigint@part=1 +POSTHOOK: query: insert into table part_change_various_various_boolean_to_bigint partition(part=1) SELECT insert_num, + boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, int1, int1, int1, int1, int1, int1, int1, + bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, + 'new' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_various_various_boolean_to_bigint@part=1 +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c1 SIMPLE 
[(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c16 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c17 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c18 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c19 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c20 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c21 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c22 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c23 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c24 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c25 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: 
part_change_various_various_boolean_to_bigint PARTITION(part=1).c26 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c27 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c28 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c29 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c30 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c31 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c32 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c33 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c34 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c35 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c36 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c37 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c38 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c39 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c40 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c41 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: 
part_change_various_various_boolean_to_bigint PARTITION(part=1).c42 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c43 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c44 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c45 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c46 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c47 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c48 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c49 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c50 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c51 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c52 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c53 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num boolean1 
boolean1 boolean1 boolean1 boolean1 boolean1 boolean1 boolean1 boolean1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 smallint1 smallint1 smallint1 smallint1 smallint1 smallint1 smallint1 smallint1 smallint1 smallint1 smallint1 int1 int1 int1 int1 int1 int1 int1 int1 int1 int1 int1 bigint1 bigint1 bigint1 bigint1 bigint1 bigint1 bigint1 bigint1 bigint1 bigint1 bigint1 _c54 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_various_various_boolean_to_bigint + Statistics: Num rows: 10 Data size: 1228 Basic stats: COMPLETE Column stats: PARTIAL + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: boolean), c2 (type: boolean), c3 (type: boolean), c4 (type: boolean), c5 (type: boolean), c6 (type: boolean), c7 (type: boolean), c8 (type: boolean), c9 (type: boolean), c10 (type: tinyint), c11 (type: tinyint), c12 (type: tinyint), c13 (type: tinyint), c14 (type: tinyint), c15 (type: tinyint), c16 (type: tinyint), c17 (type: tinyint), c18 (type: tinyint), c19 (type: tinyint), c20 (type: tinyint), c21 (type: smallint), c22 (type: smallint), c23 (type: smallint), c24 (type: smallint), c25 (type: smallint), c26 (type: smallint), c27 (type: smallint), c28 (type: smallint), c29 (type: smallint), c30 (type: smallint), c31 (type: smallint), c32 (type: int), c33 (type: int), c34 (type: int), c35 (type: int), c36 (type: int), c37 (type: int), c38 (type: int), c39 (type: int), c40 (type: int), c41 (type: int), c42 (type: int), c43 (type: bigint), c44 (type: bigint), c45 (type: bigint), c46 (type: bigint), c47 (type: bigint), c48 (type: bigint), c49 (type: bigint), c50 (type: bigint), c51 (type: bigint), c52 (type: bigint), c53 (type: bigint), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32, _col33, _col34, _col35, _col36, _col37, _col38, _col39, _col40, _col41, _col42, _col43, _col44, _col45, _col46, _col47, _col48, _col49, _col50, _col51, _col52, _col53, _col54, _col55 + Statistics: Num rows: 10 Data size: 1228 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + Statistics: Num rows: 10 Data size: 1228 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: all inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_various_various_boolean_to_bigint +PREHOOK: Input: default@part_change_various_various_boolean_to_bigint@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint +POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 c21 c22 c23 c24 c25 c26 c27 c28 c29 c30 c31 c32 c33 c34 c35 c36 c37 c38 c39 c40 c41 c42 c43 c44 c45 c46 c47 c48 c49 c50 c51 c52 c53 b +101 1 true NULL true NULL NULL NULL true NULL true 1 NULL NULL NULL NULL NULL NULL -128 -128 -128 NULL 1 -128 NULL NULL NULL NULL NULL NULL NULL NULL NULL 1 -128 NULL NULL NULL NULL NULL -2147483648 -2147483648 -2147483648 NULL 1 -128 NULL -2147483648 NULL NULL NULL NULL NULL NULL 134416490068 original +101 1 true true true true true true true true true -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL -2147483648 -2147483648 -2147483648 -2147483648 -2147483648 -2147483648 -2147483648 -2147483648 -2147483648 -2147483648 -2147483648 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL new +102 1 false false false false false false false false false 127 127 127 127 127 127 127 127 127 127 127 32767 32767 32767 32767 32767 32767 32767 32767 32767 32767 32767 2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 9223372036854775807 9223372036854775807 9223372036854775807 9223372036854775807 9223372036854775807 9223372036854775807 9223372036854775807 9223372036854775807 9223372036854775807 9223372036854775807 9223372036854775807 new +102 1 true true true true NULL NULL true NULL true 0 NULL NULL NULL NULL NULL NULL 127 127 127 NULL 0 127 NULL NULL NULL NULL NULL 32767 32767 32767 NULL 0 127 32767 NULL NULL NULL NULL 2147483647 2147483647 2147483647 NULL 0 127 32767 2147483647 NULL NULL NULL 9223372036854775807 9223372036854775807 9223372036854775807 126117945050 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL new +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original 
+104 1 true true true true true true true NULL true 1 NULL NULL NULL -100 30 NULL 23 23 23 NULL 1 23 NULL NULL -100 30 NULL 834 834 834 NULL 1 23 834 NULL -100 30 66475 203332 203332 203332 270912854 1 23 834 203332 -100 30 66475 888888857923222 888888857923222 888888857923222 270912854 original +104 1 true true true true true true true true true 23 23 23 23 23 23 23 23 23 23 23 834 834 834 834 834 834 834 834 834 834 834 203332 203332 203332 203332 203332 203332 203332 203332 203332 203332 203332 888888857923222 888888857923222 888888857923222 888888857923222 888888857923222 888888857923222 888888857923222 888888857923222 888888857923222 888888857923222 888888857923222 new +105 1 false false false false false false false false false -99 -99 -99 -99 -99 -99 -99 -99 -99 -99 -99 -28300 -28300 -28300 -28300 -28300 -28300 -28300 -28300 -28300 -28300 -28300 -999992 -999992 -999992 -999992 -999992 -999992 -999992 -999992 -999992 -999992 -999992 -222282153733 -222282153733 -222282153733 -222282153733 -222282153733 -222282153733 -222282153733 -222282153733 -222282153733 -222282153733 -222282153733 new +105 1 true true true true NULL true true NULL true 0 NULL NULL NULL NULL NULL NULL -99 -99 -99 NULL 0 -99 NULL NULL NULL NULL NULL -28300 -28300 -28300 NULL 0 -99 -28300 NULL NULL 46114 9250340 -999992 -999992 -999992 663207639 0 -99 -28300 -999992 NULL 46114 9250340 -222282153733 -222282153733 -222282153733 663207639 original +PREHOOK: query: drop table part_change_various_various_boolean_to_bigint +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_various_various_boolean_to_bigint +PREHOOK: Output: default@part_change_various_various_boolean_to_bigint +POSTHOOK: query: drop table part_change_various_various_boolean_to_bigint +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint +POSTHOOK: Output: default@part_change_various_various_boolean_to_bigint +PREHOOK: query: CREATE TABLE part_change_various_various_decimal_to_double(insert_num int, + c1 BOOLEAN, c2 TINYINT, c3 SMALLINT, c4 INT, c5 BIGINT, c6 FLOAT, c7 DOUBLE, c8 STRING, c9 CHAR(25), c10 VARCHAR(25), c11 TIMESTAMP, + c12 BOOLEAN, c13 TINYINT, c14 SMALLINT, c15 INT, c16 BIGINT, c17 DECIMAL(38,18), c18 DOUBLE, c19 STRING, c20 CHAR(25), c21 VARCHAR(25), c22 TIMESTAMP, + c23 BOOLEAN, c24 TINYINT, c25 SMALLINT, c26 INT, c27 BIGINT, c28 DECIMAL(38,18), c29 FLOAT, c30 STRING, c31 CHAR(25), c32 VARCHAR(25), c33 TIMESTAMP, + b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_various_various_decimal_to_double +POSTHOOK: query: CREATE TABLE part_change_various_various_decimal_to_double(insert_num int, + c1 BOOLEAN, c2 TINYINT, c3 SMALLINT, c4 INT, c5 BIGINT, c6 FLOAT, c7 DOUBLE, c8 STRING, c9 CHAR(25), c10 VARCHAR(25), c11 TIMESTAMP, + c12 BOOLEAN, c13 TINYINT, c14 SMALLINT, c15 INT, c16 BIGINT, c17 DECIMAL(38,18), c18 DOUBLE, c19 STRING, c20 CHAR(25), c21 VARCHAR(25), c22 TIMESTAMP, + c23 BOOLEAN, c24 TINYINT, c25 SMALLINT, c26 INT, c27 BIGINT, c28 DECIMAL(38,18), c29 FLOAT, c30 STRING, c31 CHAR(25), c32 VARCHAR(25), c33 TIMESTAMP, + b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_various_various_decimal_to_double +PREHOOK: query: insert into table part_change_various_various_decimal_to_double partition(part=1) SELECT insert_num, + boolean1, tinyint1, smallint1, int1, bigint1, float1, double1, decimal_str, decimal_str, 
decimal_str, timestamp1, + boolean1, tinyint1, smallint1, int1, bigint1, decimal1, double1, float_str, float_str, float_str, timestamp1, + boolean1, tinyint1, smallint1, int1, bigint1, decimal1, float1, double_str, double_str, double_str, timestamp1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_various_various_decimal_to_double@part=1 +POSTHOOK: query: insert into table part_change_various_various_decimal_to_double partition(part=1) SELECT insert_num, + boolean1, tinyint1, smallint1, int1, bigint1, float1, double1, decimal_str, decimal_str, decimal_str, timestamp1, + boolean1, tinyint1, smallint1, int1, bigint1, decimal1, double1, float_str, float_str, float_str, timestamp1, + boolean1, tinyint1, smallint1, int1, bigint1, decimal1, float1, double_str, double_str, double_str, timestamp1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_various_various_decimal_to_double@part=1 +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c10 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c16 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c17 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c18 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c19 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, 
type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c20 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c21 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c22 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c23 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c24 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c25 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c26 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c27 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c28 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c29 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c30 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c31 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c32 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c33 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c6 SIMPLE 
[(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c9 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 _col17 _col18 _col19 _col20 _col21 _col22 _col23 _col24 _col25 _col26 _col27 _col28 _col29 _col30 _col31 _col32 _col33 _col34 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_various_various_decimal_to_double +PREHOOK: Input: default@part_change_various_various_decimal_to_double@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_various_various_decimal_to_double +POSTHOOK: Input: default@part_change_various_various_decimal_to_double@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 c21 c22 c23 c24 c25 c26 c27 c28 c29 c30 c31 c32 c33 b +101 1 true -128 NULL -2147483648 NULL Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 99999999999999999999.9999 99999999999999999999.9999 6229-06-28 02:54:28.970117179 true -128 NULL -2147483648 NULL 99999999999999999999.999999999999999999 1.7976931348623157E308 340282347000000000000000000000000000000000 3402823470000000000000000 3402823470000000000000000 6229-06-28 02:54:28.970117179 true -128 NULL -2147483648 NULL 99999999999999999999.999999999999999999 Infinity 1.79769313486231570E+308 1.79769313486231570E+308 1.79769313486231570E+308 6229-06-28 02:54:28.970117179 original +102 1 false 127 32767 2147483647 9223372036854775807 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -99999999999999999999.999 -99999999999999999999.999 5966-07-09 03:30:50.597 false 127 32767 2147483647 9223372036854775807 -99999999999999999999.999999999999999999 -1.7976931348623157E308 -340282347000000000000000000000000000000000 -340282347000000000000000 -340282347000000000000000 5966-07-09 03:30:50.597 false 127 32767 2147483647 9223372036854775807 -99999999999999999999.999999999999999999 -Infinity -1.79769313486231570E+308 -1.79769313486231570E+308 -1.79769313486231570E+308 5966-07-09 03:30:50.597 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 true 23 834 203332 
888888857923222 -100.35978 30.774 66475.561431 66475.561431 66475.561431 1978-08-02 06:34:14 true 23 834 203332 888888857923222 66475.561431000000000000 30.774 -100.3597812 -100.3597812 -100.3597812 1978-08-02 06:34:14 true 23 834 203332 888888857923222 66475.561431000000000000 -100.35978 30.774 30.774 30.774 1978-08-02 06:34:14 original +105 1 false -99 -28300 -999992 -222282153733 NULL 46114.28 9250340.75 9250340.75 9250340.75 1991-01-06 16:20:39.72036854 false -99 -28300 -999992 -222282153733 9250340.750000000000000000 46114.28 -32768 -32768 -32768 1991-01-06 16:20:39.72036854 false -99 -28300 -999992 -222282153733 9250340.750000000000000000 NULL 46114.28 46114.28 46114.28 1991-01-06 16:20:39.72036854 original +PREHOOK: query: alter table part_change_various_various_decimal_to_double replace columns (insert_num int, + c1 DECIMAL(38,18), c2 DECIMAL(38,18), c3 DECIMAL(38,18), c4 DECIMAL(38,18), c5 DECIMAL(38,18), c6 DECIMAL(38,18), c7 DECIMAL(38,18), c8 DECIMAL(38,18), c9 DECIMAL(38,18), c10 DECIMAL(38,18), c11 DECIMAL(38,18), + c12 FLOAT, c13 FLOAT, c14 FLOAT, c15 FLOAT, c16 FLOAT, c17 FLOAT, c18 FLOAT, c19 FLOAT, c20 FLOAT, c21 FLOAT, c22 FLOAT, + c23 DOUBLE, c24 DOUBLE, c25 DOUBLE, c26 DOUBLE, c27 DOUBLE, c28 DOUBLE, c29 DOUBLE, c30 DOUBLE, c31 DOUBLE, c32 DOUBLE, c33 DOUBLE, + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_various_various_decimal_to_double +PREHOOK: Output: default@part_change_various_various_decimal_to_double +POSTHOOK: query: alter table part_change_various_various_decimal_to_double replace columns (insert_num int, + c1 DECIMAL(38,18), c2 DECIMAL(38,18), c3 DECIMAL(38,18), c4 DECIMAL(38,18), c5 DECIMAL(38,18), c6 DECIMAL(38,18), c7 DECIMAL(38,18), c8 DECIMAL(38,18), c9 DECIMAL(38,18), c10 DECIMAL(38,18), c11 DECIMAL(38,18), + c12 FLOAT, c13 FLOAT, c14 FLOAT, c15 FLOAT, c16 FLOAT, c17 FLOAT, c18 FLOAT, c19 FLOAT, c20 FLOAT, c21 FLOAT, c22 FLOAT, + c23 DOUBLE, c24 DOUBLE, c25 DOUBLE, c26 DOUBLE, c27 DOUBLE, c28 DOUBLE, c29 DOUBLE, c30 DOUBLE, c31 DOUBLE, c32 DOUBLE, c33 DOUBLE, + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_various_various_decimal_to_double +POSTHOOK: Output: default@part_change_various_various_decimal_to_double +PREHOOK: query: insert into table part_change_various_various_decimal_to_double partition(part=1) SELECT insert_num, + decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, + float1, float1, float1, float1, float1, float1, float1, float1, float1, float1, float1, + double1, double1, double1, double1, double1, double1, double1, double1, double1, double1, double1, + 'new' FROM schema_evolution_data_2 WHERE insert_num=111 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@part_change_various_various_decimal_to_double@part=1 +POSTHOOK: query: insert into table part_change_various_various_decimal_to_double partition(part=1) SELECT insert_num, + decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, + float1, float1, float1, float1, float1, float1, float1, float1, float1, float1, float1, + double1, double1, double1, double1, double1, double1, double1, double1, double1, double1, double1, + 'new' FROM schema_evolution_data_2 WHERE insert_num=111 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@part_change_various_various_decimal_to_double@part=1 +POSTHOOK: Lineage:
part_change_various_various_decimal_to_double PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c1 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c10 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c11 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c12 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c13 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c14 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c15 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c16 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c17 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c18 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c19 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c2 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c20 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c21 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c22 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c23 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c24 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: 
part_change_various_various_decimal_to_double PARTITION(part=1).c25 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c26 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c27 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c28 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c29 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c3 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c30 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c31 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c32 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c33 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c4 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c5 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c6 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c7 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c8 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c9 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num decimal1 decimal1 decimal1 decimal1 decimal1 decimal1 decimal1 decimal1 decimal1 
decimal1 decimal1 float1 float1 float1 float1 float1 float1 float1 float1 float1 float1 float1 double1 double1 double1 double1 double1 double1 double1 double1 double1 double1 double1 _c34 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_various_various_decimal_to_double + Statistics: Num rows: 6 Data size: 1320 Basic stats: COMPLETE Column stats: PARTIAL + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: decimal(38,18)), c2 (type: decimal(38,18)), c3 (type: decimal(38,18)), c4 (type: decimal(38,18)), c5 (type: decimal(38,18)), c6 (type: decimal(38,18)), c7 (type: decimal(38,18)), c8 (type: decimal(38,18)), c9 (type: decimal(38,18)), c10 (type: decimal(38,18)), c11 (type: decimal(38,18)), c12 (type: float), c13 (type: float), c14 (type: float), c15 (type: float), c16 (type: float), c17 (type: float), c18 (type: float), c19 (type: float), c20 (type: float), c21 (type: float), c22 (type: float), c23 (type: double), c24 (type: double), c25 (type: double), c26 (type: double), c27 (type: double), c28 (type: double), c29 (type: double), c30 (type: double), c31 (type: double), c32 (type: double), c33 (type: double), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32, _col33, _col34, _col35 + Statistics: Num rows: 6 Data size: 1320 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 1320 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: all inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_various_various_decimal_to_double +PREHOOK: Input: default@part_change_various_various_decimal_to_double@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double +POSTHOOK: type: QUERY +POSTHOOK: Input: 
default@part_change_various_various_decimal_to_double +POSTHOOK: Input: default@part_change_various_various_decimal_to_double@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 c21 c22 c23 c24 c25 c26 c27 c28 c29 c30 c31 c32 c33 b +101 1 1.000000000000000000 -128.000000000000000000 NULL -2147483648.000000000000000000 NULL NULL NULL 99999999999999999999.999999999999999999 99999999999999999999.999900000000000000 99999999999999999999.999900000000000000 134416490068.970120000000000000 1.0 -128.0 NULL -2.14748365E9 NULL 1.0E20 Infinity Infinity 3.4028236E24 3.4028236E24 1.3441649E11 1.0 -128.0 NULL -2.147483648E9 NULL 1.0E20 Infinity 1.7976931348623157E308 1.7976931348623157E308 1.7976931348623157E308 1.3441649006897012E11 original +102 1 0.000000000000000000 127.000000000000000000 32767.000000000000000000 2147483647.000000000000000000 9223372036854775807.000000000000000000 NULL NULL -99999999999999999999.999999999999999999 -99999999999999999999.999000000000000000 -99999999999999999999.999000000000000000 126117945050.597000000000000000 0.0 127.0 32767.0 2.14748365E9 9.223372E18 -1.0E20 -Infinity -Infinity -3.4028233E23 -3.4028233E23 1.26117945E11 0.0 127.0 32767.0 2.147483647E9 9.223372036854776E18 -1.0E20 -Infinity -1.7976931348623157E308 -1.7976931348623157E308 -1.7976931348623157E308 1.26117945050597E11 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 1.000000000000000000 23.000000000000000000 834.000000000000000000 203332.000000000000000000 888888857923222.000000000000000000 -100.359780000000000000 30.774000000000000000 66475.561431000000000000 66475.561431000000000000 66475.561431000000000000 270912854.000000000000000000 1.0 23.0 834.0 203332.0 8.8888885E14 66475.56 30.774 -100.35978 -100.35978 -100.35978 2.70912864E8 1.0 23.0 834.0 203332.0 8.88888857923222E14 66475.561431 -100.35978 30.774 30.774 30.774 2.70912854E8 original +105 1 0.000000000000000000 -99.000000000000000000 -28300.000000000000000000 -999992.000000000000000000 -222282153733.000000000000000000 NULL 46114.280000000000000000 9250340.750000000000000000 9250340.750000000000000000 9250340.750000000000000000 663207639.720368500000000000 0.0 -99.0 -28300.0 -999992.0 -2.22282154E11 9250341.0 46114.28 NULL NULL NULL 6.6320762E8 0.0 -99.0 -28300.0 -999992.0 -2.22282153733E11 9250340.75 NULL 46114.28 46114.28 46114.28 6.632076397203685E8 original +111 1 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 new +PREHOOK: query: drop table part_change_various_various_decimal_to_double +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_various_various_decimal_to_double +PREHOOK: Output: default@part_change_various_various_decimal_to_double +POSTHOOK: query: drop table part_change_various_various_decimal_to_double +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_various_various_decimal_to_double +POSTHOOK: Output: default@part_change_various_various_decimal_to_double +PREHOOK: query: 
CREATE TABLE part_change_various_various_timestamp(insert_num int, c1 BOOLEAN, c2 TINYINT, c3 SMALLINT, c4 INT, c5 BIGINT, c6 FLOAT, c7 DOUBLE, c8 DECIMAL(38,18), c9 STRING, c10 CHAR(25), c11 VARCHAR(25), c12 DATE, b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_various_various_timestamp +POSTHOOK: query: CREATE TABLE part_change_various_various_timestamp(insert_num int, c1 BOOLEAN, c2 TINYINT, c3 SMALLINT, c4 INT, c5 BIGINT, c6 FLOAT, c7 DOUBLE, c8 DECIMAL(38,18), c9 STRING, c10 CHAR(25), c11 VARCHAR(25), c12 DATE, b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_various_various_timestamp +PREHOOK: query: insert into table part_change_various_various_timestamp partition(part=1) SELECT insert_num, boolean1, tinyint1, smallint1, int1, bigint1, float1, double1, decimal1, timestamp_str, timestamp_str, timestamp_str, date1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_various_various_timestamp@part=1 +POSTHOOK: query: insert into table part_change_various_various_timestamp partition(part=1) SELECT insert_num, boolean1, tinyint1, smallint1, int1, bigint1, float1, double1, decimal1, timestamp_str, timestamp_str, timestamp_str, date1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_various_various_timestamp@part=1 +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c10 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c11 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c7 SIMPLE 
[(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_various_various_timestamp +PREHOOK: Input: default@part_change_various_various_timestamp@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_various_various_timestamp +POSTHOOK: Input: default@part_change_various_various_timestamp@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 b +101 1 true -128 NULL -2147483648 NULL Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.97011 6229-06-28 02:54:28.97011 1950-12-18 original +102 1 false 127 32767 2147483647 9223372036854775807 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 2049-12-18 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 true 23 834 203332 888888857923222 -100.35978 30.774 66475.561431000000000000 1978-08-02 06:34:14 1978-08-02 06:34:14 1978-08-02 06:34:14 2021-09-24 original +105 1 false -99 -28300 -999992 -222282153733 NULL 46114.28 9250340.750000000000000000 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036 1991-01-06 16:20:39.72036 2024-11-11 original +PREHOOK: query: alter table part_change_various_various_timestamp replace columns (insert_num int, c1 TIMESTAMP, c2 TIMESTAMP, c3 TIMESTAMP, c4 TIMESTAMP, c5 TIMESTAMP, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, c11 TIMESTAMP, c12 TIMESTAMP, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_various_various_timestamp +PREHOOK: Output: default@part_change_various_various_timestamp +POSTHOOK: query: alter table part_change_various_various_timestamp replace columns (insert_num int, c1 TIMESTAMP, c2 TIMESTAMP, c3 TIMESTAMP, c4 TIMESTAMP, c5 TIMESTAMP, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, c11 TIMESTAMP, c12 TIMESTAMP, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_various_various_timestamp +POSTHOOK: Output: default@part_change_various_various_timestamp +PREHOOK: query: insert into table part_change_various_various_timestamp partition(part=1) SELECT insert_num, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'new' FROM schema_evolution_data_2 WHERE insert_num=111 +PREHOOK: type: QUERY 
+PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@part_change_various_various_timestamp@part=1 +POSTHOOK: query: insert into table part_change_various_various_timestamp partition(part=1) SELECT insert_num, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'new' FROM schema_evolution_data_2 WHERE insert_num=111 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@part_change_various_various_timestamp@part=1 +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c1 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c10 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c11 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c12 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c2 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c3 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c4 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c5 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c6 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c7 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c8 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c9 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 _c13 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp 
+PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_various_various_timestamp + Statistics: Num rows: 6 Data size: 6973 Basic stats: COMPLETE Column stats: PARTIAL + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: timestamp), c2 (type: timestamp), c3 (type: timestamp), c4 (type: timestamp), c5 (type: timestamp), c6 (type: timestamp), c7 (type: timestamp), c8 (type: timestamp), c9 (type: timestamp), c10 (type: timestamp), c11 (type: timestamp), c12 (type: timestamp), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14 + Statistics: Num rows: 6 Data size: 4032 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 4032 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: all inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_various_various_timestamp +PREHOOK: Input: default@part_change_various_various_timestamp@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_various_various_timestamp +POSTHOOK: Input: default@part_change_various_various_timestamp@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 b +101 1 1969-12-31 16:00:00.001 1969-12-31 15:59:59.872 NULL 1969-12-06 19:28:36.352 NULL NULL NULL NULL 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.97011 6229-06-28 02:54:28.97011 1950-12-18 00:00:00 original +102 1 1969-12-31 16:00:00 1969-12-31 16:00:00.127 1969-12-31 16:00:32.767 1970-01-25 12:31:23.647 NULL NULL 1969-12-31 16:00:00 NULL 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 2049-12-18 00:00:00 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 1969-12-31 16:00:00.001 1969-12-31 16:00:00.023 1969-12-31 16:00:00.834 1969-12-31 16:03:23.332 NULL 1969-12-31 15:58:19.640220643 1969-12-31 16:00:30.774 1970-01-01 10:27:55.561431 1978-08-02 06:34:14 1978-08-02 06:34:14 1978-08-02 06:34:14 2021-09-24 00:00:00 original +105 1 1969-12-31 16:00:00 1969-12-31 15:59:59.901 1969-12-31 15:59:31.7 1969-12-31 15:43:20.008 1962-12-15 22:57:26.267 NULL 1970-01-01 04:48:34.28 1970-04-17 17:32:20.75 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036 1991-01-06 16:20:39.72036 2024-11-11 00:00:00 original +111 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL new 
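The result set above shows the pattern these schema_evol golden files exercise: a partition is populated under the original mixed column types, ALTER TABLE ... REPLACE COLUMNS swaps the data columns to a new type (here, all TIMESTAMP), and the pre-alter rows are converted to the new types at read time, with values that have no valid conversion returned as NULL. For the integral columns, rows 101-105 above are consistent with a milliseconds-since-epoch interpretation (e.g. BIGINT -222282153733 reads back as 1962-12-15 22:57:26.267). A minimal sketch of the same flow, using a hypothetical table name not taken from this patch:

    -- Hypothetical illustration of the REPLACE COLUMNS read-time conversion
    -- pattern; not part of the golden output in this patch.
    CREATE TABLE evol_demo (insert_num INT, c1 BIGINT, b STRING) PARTITIONED BY (part INT);
    INSERT INTO TABLE evol_demo PARTITION (part=1) VALUES (101, -222282153733, 'original');
    ALTER TABLE evol_demo REPLACE COLUMNS (insert_num INT, c1 TIMESTAMP, b STRING);
    -- The old partition file still holds a BIGINT; it is converted when read
    -- under the new schema, and unconvertible values come back as NULL.
    SELECT insert_num, part, c1, b FROM evol_demo;

The golden files that follow repeat this flow for the other type families (dates, chars/varchars/decimals with changed parameters), verifying the converted output both with vectorization disabled and under LLAP IO.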
+PREHOOK: query: drop table part_change_various_various_timestamp +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_various_various_timestamp +PREHOOK: Output: default@part_change_various_various_timestamp +POSTHOOK: query: drop table part_change_various_various_timestamp +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_various_various_timestamp +POSTHOOK: Output: default@part_change_various_various_timestamp +PREHOOK: query: CREATE TABLE part_change_various_various_date(insert_num int, c1 STRING, c2 CHAR(25), c3 VARCHAR(25), c4 TIMESTAMP, b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_various_various_date +POSTHOOK: query: CREATE TABLE part_change_various_various_date(insert_num int, c1 STRING, c2 CHAR(25), c3 VARCHAR(25), c4 TIMESTAMP, b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_various_various_date +PREHOOK: query: insert into table part_change_various_various_date partition(part=1) SELECT insert_num, date_str, date_str, date_str, timestamp1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_various_various_date@part=1 +POSTHOOK: query: insert into table part_change_various_various_date partition(part=1) SELECT insert_num, date_str, date_str, date_str, timestamp1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_various_various_date@part=1 +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).c2 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).c3 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +_col0 _col1 _col2 _col3 _col4 _col5 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_various_various_date +PREHOOK: Input: default@part_change_various_various_date@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_various_various_date +POSTHOOK: Input: default@part_change_various_various_date@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 b +101 1 1950-12-18 1950-12-18 1950-12-18 6229-06-28 02:54:28.970117179 original +102 1 2049-12-18 2049-12-18 2049-12-18 5966-07-09 03:30:50.597 original +103 1 NULL original +104 1 2021-09-24 2021-09-24 2021-09-24 1978-08-02 06:34:14 original 
+105 1 2024-11-11 2024-11-11 2024-11-11 1991-01-06 16:20:39.72036854 original +PREHOOK: query: alter table part_change_various_various_date replace columns (insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_various_various_date +PREHOOK: Output: default@part_change_various_various_date +POSTHOOK: query: alter table part_change_various_various_date replace columns (insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_various_various_date +POSTHOOK: Output: default@part_change_various_various_date +PREHOOK: query: insert into table part_change_various_various_date partition(part=1) SELECT insert_num, date1, date1, date1, date1, 'new' FROM schema_evolution_data_2 WHERE insert_num=111 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@part_change_various_various_date@part=1 +POSTHOOK: query: insert into table part_change_various_various_date partition(part=1) SELECT insert_num, date1, date1, date1, date1, 'new' FROM schema_evolution_data_2 WHERE insert_num=111 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@part_change_various_various_date@part=1 +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).c1 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).c2 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).c3 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).c4 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num date1 date1 date1 date1 _c5 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_various_various_date + Statistics: Num rows: 6 Data size: 2496 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: date), c2 (type: date), c3 (type: date), c4 (type: date), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 + Statistics: Num rows: 6 Data size: 2496 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 2496 Basic stats: 
COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: all inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_various_various_date +PREHOOK: Input: default@part_change_various_various_date@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_various_various_date +POSTHOOK: Input: default@part_change_various_various_date@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 b +101 1 1950-12-18 1950-12-18 1950-12-18 6229-06-28 original +102 1 2049-12-18 2049-12-18 2049-12-18 5966-07-09 original +103 1 NULL NULL NULL NULL original +104 1 2021-09-24 2021-09-24 2021-09-24 1978-08-02 original +105 1 2024-11-11 2024-11-11 2024-11-11 1991-01-06 original +111 1 1964-01-24 1964-01-24 1964-01-24 1964-01-24 new +PREHOOK: query: drop table part_change_various_various_date +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_various_various_date +PREHOOK: Output: default@part_change_various_various_date +POSTHOOK: query: drop table part_change_various_various_date +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_various_various_date +POSTHOOK: Output: default@part_change_various_various_date +PREHOOK: query: CREATE TABLE part_change_same_type_different_params(insert_num int, c1 CHAR(12), c2 CHAR(25), c3 VARCHAR(25), c4 VARCHAR(10), c5 DECIMAL(12,4), c6 DECIMAL(20,10), b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_same_type_different_params +POSTHOOK: query: CREATE TABLE part_change_same_type_different_params(insert_num int, c1 CHAR(12), c2 CHAR(25), c3 VARCHAR(25), c4 VARCHAR(10), c5 DECIMAL(12,4), c6 DECIMAL(20,10), b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_same_type_different_params +PREHOOK: query: CREATE TABLE same_type1_a_txt(insert_num int, c1 CHAR(12), c2 CHAR(25), c3 VARCHAR(25), c4 VARCHAR(10), c5 DECIMAL(12,4), c6 DECIMAL(20,10), b STRING) +row format delimited fields terminated by '|' +stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@same_type1_a_txt +POSTHOOK: query: CREATE TABLE same_type1_a_txt(insert_num int, c1 CHAR(12), c2 CHAR(25), c3 VARCHAR(25), c4 VARCHAR(10), c5 DECIMAL(12,4), c6 DECIMAL(20,10), b STRING) +row format delimited fields terminated by '|' +stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@same_type1_a_txt +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/same_type1_a.txt' overwrite into table same_type1_a_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@same_type1_a_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/same_type1_a.txt' overwrite into table same_type1_a_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@same_type1_a_txt +PREHOOK: query: 
insert into table part_change_same_type_different_params partition(part=1) select * from same_type1_a_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@same_type1_a_txt +PREHOOK: Output: default@part_change_same_type_different_params@part=1 +POSTHOOK: query: insert into table part_change_same_type_different_params partition(part=1) select * from same_type1_a_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@same_type1_a_txt +POSTHOOK: Output: default@part_change_same_type_different_params@part=1 +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).b SIMPLE [(same_type1_a_txt)same_type1_a_txt.FieldSchema(name:b, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c1 SIMPLE [(same_type1_a_txt)same_type1_a_txt.FieldSchema(name:c1, type:char(12), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c2 SIMPLE [(same_type1_a_txt)same_type1_a_txt.FieldSchema(name:c2, type:char(25), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c3 SIMPLE [(same_type1_a_txt)same_type1_a_txt.FieldSchema(name:c3, type:varchar(25), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c4 SIMPLE [(same_type1_a_txt)same_type1_a_txt.FieldSchema(name:c4, type:varchar(10), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c5 SIMPLE [(same_type1_a_txt)same_type1_a_txt.FieldSchema(name:c5, type:decimal(12,4), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c6 SIMPLE [(same_type1_a_txt)same_type1_a_txt.FieldSchema(name:c6, type:decimal(20,10), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).insert_num SIMPLE [(same_type1_a_txt)same_type1_a_txt.FieldSchema(name:insert_num, type:int, comment:null), ] +same_type1_a_txt.insert_num same_type1_a_txt.c1 same_type1_a_txt.c2 same_type1_a_txt.c3 same_type1_a_txt.c4 same_type1_a_txt.c5 same_type1_a_txt.c6 same_type1_a_txt.b +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_same_type_different_params +PREHOOK: Input: default@part_change_same_type_different_params@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_same_type_different_params +POSTHOOK: Input: default@part_change_same_type_different_params@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 b +1 1 excess corporation believable gateway thumb repe 76855494.3900 NULL original +2 1 mutation hideout authentic blankness sonic disfigure hover know 42093605.0000 3182652.4066800000 original +3 1 hibernation country detox coyotes ball pulse candle direct 1413570.3000 8.3095808155 original +4 1 aquarium encounter existent amuse fuse light headlock bumper adm 5475414.6500 25937.6752563004 original +5 1 123456789012 1234567890123456789012345 1234567890123456789012345 1234567890 12345678.1234 1234567890.0987654321 original +PREHOOK: query: alter table part_change_same_type_different_params replace columns (insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: 
default@part_change_same_type_different_params +PREHOOK: Output: default@part_change_same_type_different_params +POSTHOOK: query: alter table part_change_same_type_different_params replace columns (insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_same_type_different_params +POSTHOOK: Output: default@part_change_same_type_different_params +PREHOOK: query: CREATE TABLE same_type1_b_txt(insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING) +row format delimited fields terminated by '|' +stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@same_type1_b_txt +POSTHOOK: query: CREATE TABLE same_type1_b_txt(insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING) +row format delimited fields terminated by '|' +stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@same_type1_b_txt +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/same_type1_b.txt' overwrite into table same_type1_b_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@same_type1_b_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/same_type1_b.txt' overwrite into table same_type1_b_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@same_type1_b_txt +PREHOOK: query: insert into table part_change_same_type_different_params partition(part=1) select * from same_type1_b_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@same_type1_b_txt +PREHOOK: Output: default@part_change_same_type_different_params@part=1 +POSTHOOK: query: insert into table part_change_same_type_different_params partition(part=1) select * from same_type1_b_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@same_type1_b_txt +POSTHOOK: Output: default@part_change_same_type_different_params@part=1 +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).b SIMPLE [(same_type1_b_txt)same_type1_b_txt.FieldSchema(name:b, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c1 SIMPLE [(same_type1_b_txt)same_type1_b_txt.FieldSchema(name:c1, type:char(8), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c2 SIMPLE [(same_type1_b_txt)same_type1_b_txt.FieldSchema(name:c2, type:char(32), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c3 SIMPLE [(same_type1_b_txt)same_type1_b_txt.FieldSchema(name:c3, type:varchar(15), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c4 SIMPLE [(same_type1_b_txt)same_type1_b_txt.FieldSchema(name:c4, type:varchar(18), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c5 SIMPLE [(same_type1_b_txt)same_type1_b_txt.FieldSchema(name:c5, type:decimal(10,2), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c6 SIMPLE [(same_type1_b_txt)same_type1_b_txt.FieldSchema(name:c6, type:decimal(25,15), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).insert_num SIMPLE 
[(same_type1_b_txt)same_type1_b_txt.FieldSchema(name:insert_num, type:int, comment:null), ] +same_type1_b_txt.insert_num same_type1_b_txt.c1 same_type1_b_txt.c2 same_type1_b_txt.c3 same_type1_b_txt.c4 same_type1_b_txt.c5 same_type1_b_txt.c6 same_type1_b_txt.b +PREHOOK: query: CREATE TABLE same_type1_c_txt(insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING) +row format delimited fields terminated by '|' +stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@same_type1_c_txt +POSTHOOK: query: CREATE TABLE same_type1_c_txt(insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING) +row format delimited fields terminated by '|' +stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@same_type1_c_txt +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/same_type1_c.txt' overwrite into table same_type1_c_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@same_type1_c_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/same_type1_c.txt' overwrite into table same_type1_c_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@same_type1_c_txt +PREHOOK: query: insert into table part_change_same_type_different_params partition(part=2) select * from same_type1_c_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@same_type1_c_txt +PREHOOK: Output: default@part_change_same_type_different_params@part=2 +POSTHOOK: query: insert into table part_change_same_type_different_params partition(part=2) select * from same_type1_c_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@same_type1_c_txt +POSTHOOK: Output: default@part_change_same_type_different_params@part=2 +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=2).b SIMPLE [(same_type1_c_txt)same_type1_c_txt.FieldSchema(name:b, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=2).c1 SIMPLE [(same_type1_c_txt)same_type1_c_txt.FieldSchema(name:c1, type:char(8), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=2).c2 SIMPLE [(same_type1_c_txt)same_type1_c_txt.FieldSchema(name:c2, type:char(32), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=2).c3 SIMPLE [(same_type1_c_txt)same_type1_c_txt.FieldSchema(name:c3, type:varchar(15), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=2).c4 SIMPLE [(same_type1_c_txt)same_type1_c_txt.FieldSchema(name:c4, type:varchar(18), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=2).c5 SIMPLE [(same_type1_c_txt)same_type1_c_txt.FieldSchema(name:c5, type:decimal(10,2), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=2).c6 SIMPLE [(same_type1_c_txt)same_type1_c_txt.FieldSchema(name:c6, type:decimal(25,15), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=2).insert_num SIMPLE [(same_type1_c_txt)same_type1_c_txt.FieldSchema(name:insert_num, type:int, comment:null), ] +same_type1_c_txt.insert_num same_type1_c_txt.c1 same_type1_c_txt.c2 same_type1_c_txt.c3 same_type1_c_txt.c4 same_type1_c_txt.c5 same_type1_c_txt.c6 same_type1_c_txt.b +PREHOOK: query: 
explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_same_type_different_params + Statistics: Num rows: 13 Data size: 9347 Basic stats: COMPLETE Column stats: PARTIAL + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: char(8)), c2 (type: char(32)), c3 (type: varchar(15)), c4 (type: varchar(18)), c5 (type: decimal(10,2)), c6 (type: decimal(25,15)), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 + Statistics: Num rows: 13 Data size: 9347 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + Statistics: Num rows: 13 Data size: 9347 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: all inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_same_type_different_params +PREHOOK: Input: default@part_change_same_type_different_params@part=1 +PREHOOK: Input: default@part_change_same_type_different_params@part=2 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_same_type_different_params +POSTHOOK: Input: default@part_change_same_type_different_params@part=1 +POSTHOOK: Input: default@part_change_same_type_different_params@part=2 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 b +1 1 excess corporation believable gateway thumb repe 76855494.39 NULL original +10 2 dreamlan raster devastation association boa lubricant century 5830.99 NULL new +11 2 tiger however center propeller agoni shocking misshapen 6946533.80 31608910.030000000000000 new +12 2 bulgin extensive ambivalent rastled billion zeroes 943084.30 3090297719.717120349328271 new +13 2 12345678 12345678901234567890123456789012 123456789012345 123456789012345678 NULL 1234567890.543210987654321 new +2 1 mutation hideout authentic blankness sonic hover know 42093605.00 3182652.406680000000000 original +3 1 hibernat country detox coyotes ball pulse cand direct 1413570.30 8.309580815500000 original +4 1 aquarium encounter existent amuse fuse light head bumper adm 5475414.65 25937.675256300400000 original +5 1 12345678 1234567890123456789012345 123456789012345 1234567890 12345678.12 1234567890.098765432100000 original +6 1 hollow innocent crabs blushing ambition ebony liquor ageless par NULL 3841833197.314137090000000 new +7 1 wig feel social fork drum search bump conclusion percept 8.31 NULL new +8 1 bubble f drain loyal station racket antique 
business 0.69 22.832613278741300 new +9 1 12345678 12345678901234567890123456789012 123456789012345 123456789012345678 NULL 1234567890.543210987654321 new +PREHOOK: query: drop table part_change_same_type_different_params +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_same_type_different_params +PREHOOK: Output: default@part_change_same_type_different_params +POSTHOOK: query: drop table part_change_same_type_different_params +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_same_type_different_params +POSTHOOK: Output: default@part_change_same_type_different_params diff --git ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_llap_io.q.out ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_llap_io.q.out new file mode 100644 index 0000000..2163aa6 --- /dev/null +++ ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_llap_io.q.out @@ -0,0 +1,1474 @@ +PREHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@schema_evolution_data +POSTHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@schema_evolution_data +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@schema_evolution_data +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@schema_evolution_data +PREHOOK: query: CREATE TABLE part_add_int_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_add_int_permute_select +POSTHOOK: query: CREATE TABLE part_add_int_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_add_int_permute_select +PREHOOK: query: insert into table part_add_int_permute_select partition(part=1) VALUES (1, 1111, 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_add_int_permute_select@part=1 +POSTHOOK: query: insert into table part_add_int_permute_select partition(part=1) VALUES 
(1, 1111, 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_add_int_permute_select@part=1 +POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).a SCRIPT [] +POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).insert_num SCRIPT [] +col1 col2 col3 +PREHOOK: query: alter table part_add_int_permute_select add columns(c int) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@part_add_int_permute_select +PREHOOK: Output: default@part_add_int_permute_select +POSTHOOK: query: alter table part_add_int_permute_select add columns(c int) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@part_add_int_permute_select +POSTHOOK: Output: default@part_add_int_permute_select +PREHOOK: query: insert into table part_add_int_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333) +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_add_int_permute_select@part=1 +POSTHOOK: query: insert into table part_add_int_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333) +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_add_int_permute_select@part=1 +POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).a SCRIPT [] +POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).c SCRIPT [] +POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).insert_num SCRIPT [] +col1 col2 col3 col4 +PREHOOK: query: explain vectorization detail +select insert_num,part,a,b from part_add_int_permute_select +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,a,b from part_add_int_permute_select +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_add_int_permute_select + Statistics: Num rows: 2 Data size: 198 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: insert_num (type: int), part (type: int), a (type: int), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 2 Data size: 198 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 198 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: all inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,a,b from part_add_int_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_permute_select +PREHOOK: Input: default@part_add_int_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b from part_add_int_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_permute_select +POSTHOOK: Input: 
default@part_add_int_permute_select@part=1 +#### A masked pattern was here #### +insert_num part a b +1 1 1111 new +2 1 2222 new +PREHOOK: query: select insert_num,part,a,b,c from part_add_int_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_permute_select +PREHOOK: Input: default@part_add_int_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b,c from part_add_int_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_permute_select +POSTHOOK: Input: default@part_add_int_permute_select@part=1 +#### A masked pattern was here #### +insert_num part a b c +1 1 1111 new NULL +2 1 2222 new 3333 +PREHOOK: query: select insert_num,part,c from part_add_int_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_permute_select +PREHOOK: Input: default@part_add_int_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c from part_add_int_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_permute_select +POSTHOOK: Input: default@part_add_int_permute_select@part=1 +#### A masked pattern was here #### +insert_num part c +1 1 NULL +2 1 3333 +PREHOOK: query: drop table part_add_int_permute_select +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_add_int_permute_select +PREHOOK: Output: default@part_add_int_permute_select +POSTHOOK: query: drop table part_add_int_permute_select +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_add_int_permute_select +POSTHOOK: Output: default@part_add_int_permute_select +PREHOOK: query: CREATE TABLE part_add_int_string_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_add_int_string_permute_select +POSTHOOK: query: CREATE TABLE part_add_int_string_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_add_int_string_permute_select +PREHOOK: query: insert into table part_add_int_string_permute_select partition(part=1) VALUES (1, 1111, 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_add_int_string_permute_select@part=1 +POSTHOOK: query: insert into table part_add_int_string_permute_select partition(part=1) VALUES (1, 1111, 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_add_int_string_permute_select@part=1 +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).a SCRIPT [] +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).insert_num SCRIPT [] +col1 col2 col3 +PREHOOK: query: alter table part_add_int_string_permute_select add columns(c int, d string) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Output: default@part_add_int_string_permute_select +POSTHOOK: query: alter table part_add_int_string_permute_select add columns(c int, d string) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Output: default@part_add_int_string_permute_select +PREHOOK: query: insert into table part_add_int_string_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333, '4444') +PREHOOK: type: QUERY +PREHOOK: 
Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_add_int_string_permute_select@part=1 +POSTHOOK: query: insert into table part_add_int_string_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333, '4444') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_add_int_string_permute_select@part=1 +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).a SCRIPT [] +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).c SCRIPT [] +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).d SCRIPT [] +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).insert_num SCRIPT [] +col1 col2 col3 col4 col5 +PREHOOK: query: explain vectorization detail +select insert_num,part,a,b from part_add_int_string_permute_select +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,a,b from part_add_int_string_permute_select +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_add_int_string_permute_select + Statistics: Num rows: 2 Data size: 198 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: insert_num (type: int), part (type: int), a (type: int), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 2 Data size: 198 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 198 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: all inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,a,b from part_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b from part_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +insert_num part a b +1 1 1111 new +2 1 2222 new +PREHOOK: query: select insert_num,part,a,b,c from part_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b,c from part_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +insert_num part a b c +1 1 1111 new NULL +2 1 2222 new 3333 +PREHOOK: query: select insert_num,part,a,b,c,d from 
part_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b,c,d from part_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +insert_num part a b c d +1 1 1111 new NULL NULL +2 1 2222 new 3333 4444 +PREHOOK: query: select insert_num,part,a,c,d from part_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,c,d from part_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +insert_num part a c d +1 1 1111 NULL NULL +2 1 2222 3333 4444 +PREHOOK: query: select insert_num,part,a,d from part_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,d from part_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +insert_num part a d +1 1 1111 NULL +2 1 2222 4444 +PREHOOK: query: select insert_num,part,c from part_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c from part_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +insert_num part c +1 1 NULL +2 1 3333 +PREHOOK: query: select insert_num,part,d from part_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,d from part_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +insert_num part d +1 1 NULL +2 1 4444 +PREHOOK: query: drop table part_add_int_string_permute_select +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Output: default@part_add_int_string_permute_select +POSTHOOK: query: drop table part_add_int_string_permute_select +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Output: default@part_add_int_string_permute_select +PREHOOK: query: CREATE TABLE part_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: 
Output: database:default +PREHOOK: Output: default@part_change_string_group_double +POSTHOOK: query: CREATE TABLE part_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_string_group_double +PREHOOK: query: insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_string_group_double@part=1 +POSTHOOK: query: insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_string_group_double@part=1 +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).c2 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).c3 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +_col0 _col1 _col2 _col3 _col4 +PREHOOK: query: alter table part_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_string_group_double +PREHOOK: Output: default@part_change_string_group_double +POSTHOOK: query: alter table part_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_string_group_double +POSTHOOK: Output: default@part_change_string_group_double +PREHOOK: query: insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double1, double1, double1, 'new' FROM schema_evolution_data WHERE insert_num = 111 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_string_group_double@part=1 +POSTHOOK: query: insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double1, double1, double1, 'new' FROM schema_evolution_data WHERE insert_num = 111 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_string_group_double@part=1 +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: 
part_change_string_group_double PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num double1 double1 double1 _c4 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,b from part_change_string_group_double +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,b from part_change_string_group_double +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_string_group_double + Statistics: Num rows: 5 Data size: 500 Basic stats: COMPLETE Column stats: PARTIAL + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: double), c2 (type: double), c3 (type: double), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + Statistics: Num rows: 5 Data size: 500 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + Statistics: Num rows: 5 Data size: 500 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: all inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_string_group_double +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_string_group_double +PREHOOK: Input: default@part_change_string_group_double@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_string_group_double +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_string_group_double +POSTHOOK: Input: default@part_change_string_group_double@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 b +101 1 1.7976931348623157E308 1.7976931348623157E308 1.7976931348623157E308 original +102 1 -1.7976931348623157E308 -1.7976931348623157E308 -1.7976931348623157E308 original +103 1 NULL NULL NULL original +104 1 30.774 30.774 30.774 original +105 1 46114.28 46114.28 46114.28 original +PREHOOK: query: drop table part_change_string_group_double +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_string_group_double +PREHOOK: Output: default@part_change_string_group_double +POSTHOOK: query: drop table part_change_string_group_double +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_string_group_double +POSTHOOK: Output: default@part_change_string_group_double +PREHOOK: query: CREATE TABLE part_change_date_group_string_group_date_timestamp(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_date_group_string_group_date_timestamp +POSTHOOK: query: 
CREATE TABLE part_change_date_group_string_group_date_timestamp(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_date_group_string_group_date_timestamp +PREHOOK: query: insert into table part_change_date_group_string_group_date_timestamp partition(part=1) SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_date_group_string_group_date_timestamp@part=1 +POSTHOOK: query: insert into table part_change_date_group_string_group_date_timestamp partition(part=1) SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_date_group_string_group_date_timestamp@part=1 +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num date1 date1 
date1 date1 date1 timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 _c11 +PREHOOK: query: alter table part_change_date_group_string_group_date_timestamp replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_date_group_string_group_date_timestamp +PREHOOK: Output: default@part_change_date_group_string_group_date_timestamp +POSTHOOK: query: alter table part_change_date_group_string_group_date_timestamp replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp +POSTHOOK: Output: default@part_change_date_group_string_group_date_timestamp +PREHOOK: query: insert into table part_change_date_group_string_group_date_timestamp partition(part=1) VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_change_date_group_string_group_date_timestamp@part=1 +POSTHOOK: query: insert into table part_change_date_group_string_group_date_timestamp partition(part=1) VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_change_date_group_string_group_date_timestamp@part=1 +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c1 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c10 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c2 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c3 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c4 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c5 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c6 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c7 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c8 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c9 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE 
DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: part_change_date_group_string_group_date_timestamp
+ Statistics: Num rows: 6 Data size: 12449 Basic stats: COMPLETE Column stats: PARTIAL
+ Select Operator
+ expressions: insert_num (type: int), part (type: int), c1 (type: string), c2 (type: char(50)), c3 (type: char(15)), c4 (type: varchar(50)), c5 (type: varchar(15)), c6 (type: string), c7 (type: char(50)), c8 (type: char(15)), c9 (type: varchar(50)), c10 (type: varchar(15)), b (type: string)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12
+ Statistics: Num rows: 6 Data size: 8952 Basic stats: COMPLETE Column stats: PARTIAL
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 6 Data size: 8952 Basic stats: COMPLETE Column stats: PARTIAL
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Execution mode: llap
+ LLAP IO: all inputs
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp
+PREHOOK: type: QUERY
+PREHOOK: Input: default@part_change_date_group_string_group_date_timestamp
+PREHOOK: Input: default@part_change_date_group_string_group_date_timestamp@part=1
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp
+POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp@part=1
+#### A masked pattern was here ####
+insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b
+101 1 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.970117179 6229-06-28 02:5 6229-06-28 02:54:28.970117179 6229-06-28 02:5 original
+102 1 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:3 5966-07-09 03:30:50.597 5966-07-09 03:3 original
+103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
+104 1 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-02 06:34:14.0 1978-08-02 06:34:14.0 1978-08-02 06:3 1978-08-02 06:34:14.0 1978-08-02 06:3 original
+105 1 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036854 1991-01-06 16:2 1991-01-06 16:20:39.72036854 1991-01-06 16:2 original
+111 1 filler filler filler filler filler filler filler filler filler filler new
+PREHOOK: query: drop table part_change_date_group_string_group_date_timestamp
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@part_change_date_group_string_group_date_timestamp
+PREHOOK: Output: default@part_change_date_group_string_group_date_timestamp
+POSTHOOK: query: drop table part_change_date_group_string_group_date_timestamp
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp
+POSTHOOK: Output: default@part_change_date_group_string_group_date_timestamp
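The CHAR(15) columns above show the conversion rule under test: DATE and TIMESTAMP values written before the ALTER are re-rendered as text on read, and CHAR(15) keeps only the first 15 characters, which is why '1978-08-02 06:34:14.0' surfaces as '1978-08-02 06:3'. A minimal sketch of the same effect, with the hypothetical table name demo_ts (not part of this patch; assumes a text-format table and that the metastore permits the type change):

-- Hypothetical sketch, not from the patch: TIMESTAMP narrowed to CHAR(15) after data is written.
CREATE TABLE demo_ts (insert_num int, c1 timestamp) PARTITIONED BY (part int) STORED AS TEXTFILE;
INSERT INTO demo_ts PARTITION (part=1) VALUES (1, '1978-08-02 06:34:14');
ALTER TABLE demo_ts REPLACE COLUMNS (insert_num int, c1 CHAR(15));
SELECT insert_num, c1 FROM demo_ts;   -- expect '1978-08-02 06:3'

+PREHOOK: query: CREATE TABLE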
part_change_numeric_group_string_group_multi_ints_string_group(insert_num int, + c1 tinyint, c2 smallint, c3 int, c4 bigint, + c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint, + c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint, + b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: query: CREATE TABLE part_change_numeric_group_string_group_multi_ints_string_group(insert_num int, + c1 tinyint, c2 smallint, c3 int, c4 bigint, + c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint, + c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint, + b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: query: insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) SELECT insert_num, + tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +POSTHOOK: query: insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) SELECT insert_num, + tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: 
part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c16 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c17 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c18 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c19 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c20 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 _c21 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: 
type: QUERY +PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 b +101 1 -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL original +102 1 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 203332 888888857923222 original +105 1 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 original +PREHOOK: query: alter table part_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, c4 STRING, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5), + c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5), + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: query: alter table part_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, c4 STRING, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5), + c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5), + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: query: insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +POSTHOOK: query: insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 
'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c1 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c10 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c11 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c12 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c13 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c14 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c15 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c16 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c17 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c18 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c19 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c2 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c20 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c3 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c4 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c5 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c6 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c7 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c8 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c9 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 _col17 _col18 _col19 _col20 _col21 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: 
[hive.vectorized.execution.enabled IS false]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: part_change_numeric_group_string_group_multi_ints_string_group
+ Statistics: Num rows: 6 Data size: 1140 Basic stats: COMPLETE Column stats: COMPLETE
+ Select Operator
+ expressions: insert_num (type: int), part (type: int), c1 (type: string), c2 (type: string), c3 (type: string), c4 (type: string), c5 (type: char(50)), c6 (type: char(50)), c7 (type: char(50)), c8 (type: char(50)), c9 (type: char(5)), c10 (type: char(5)), c11 (type: char(5)), c12 (type: char(5)), c13 (type: varchar(50)), c14 (type: varchar(50)), c15 (type: varchar(50)), c16 (type: varchar(50)), c17 (type: varchar(5)), c18 (type: varchar(5)), c19 (type: varchar(5)), c20 (type: varchar(5)), b (type: string)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22
+ Statistics: Num rows: 6 Data size: 1140 Basic stats: COMPLETE Column stats: COMPLETE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 6 Data size: 1140 Basic stats: COMPLETE Column stats: COMPLETE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Execution mode: llap
+ LLAP IO: all inputs
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: type: QUERY
+PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group
+PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group
+POSTHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1
+#### A masked pattern was here ####
+insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 b
+101 1 -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147 NULL -128 NULL -2147483648 NULL -128 NULL -2147 NULL original
+102 1 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 21474 92233 127 32767 2147483647 9223372036854775807 127 32767 21474 92233 original
+103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
+104 1 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 20333 88888 23 834 203332 888888857923222 23 834 20333 88888 original
+105 1 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -2830 -9999 -2222 -99 -28300 -999992 -222282153733 -99 -2830 -9999 -2222 original
+111 1 filler filler filler filler filler filler filler filler fille fille fille fille filler filler filler filler fille fille fille fille new
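The result set above shows the narrowing case for the integer family: data re-read through CHAR(5)/VARCHAR(5) is rendered as text and cut to the declared length, so -2147483648 comes back as '-2147' and 'filler' as 'fille', while the CHAR(50)/VARCHAR(50) columns survive intact. A minimal sketch with the hypothetical table demo_i2c (not from the patch; same text-table assumptions as above):

-- Hypothetical sketch, not from the patch: INT re-typed to CHAR(5) after data is written.
-- Old rows are rendered as text and then truncated to the declared length.
CREATE TABLE demo_i2c (insert_num int, c1 int) PARTITIONED BY (part int) STORED AS TEXTFILE;
INSERT INTO demo_i2c PARTITION (part=1) VALUES (1, -2147483648);
ALTER TABLE demo_i2c REPLACE COLUMNS (insert_num int, c1 CHAR(5));
SELECT insert_num, c1 FROM demo_i2c;   -- expect '-2147'

+PREHOOK: query: drop table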
part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: query: drop table part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: query: CREATE TABLE part_change_numeric_group_string_group_floating_string_group(insert_num int, + c1 decimal(38,18), c2 float, c3 double, + c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double, + c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double, + b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: query: CREATE TABLE part_change_numeric_group_string_group_floating_string_group(insert_num int, + c1 decimal(38,18), c2 float, c3 double, + c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double, + c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double, + b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: query: insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) SELECT insert_num, + decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group@part=1 +POSTHOOK: query: insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) SELECT insert_num, + decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group@part=1 +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: 
part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num decimal1 float1 double1 decimal1 float1 double1 decimal1 float1 double1 decimal1 float1 double1 decimal1 float1 double1 _c16 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b +101 1 
99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 original +102 1 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 original +105 1 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 original +PREHOOK: query: alter table part_change_numeric_group_string_group_floating_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, + c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7), + c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7), + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: query: alter table part_change_numeric_group_string_group_floating_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, + c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7), + c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7), + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: query: insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group@part=1 +POSTHOOK: query: insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group@part=1 +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c1 
SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c10 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c11 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c12 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c13 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c14 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c15 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c2 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c3 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c4 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c5 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c6 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c7 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c8 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c9 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_numeric_group_string_group_floating_string_group + Statistics: Num rows: 6 Data size: 3300 Basic stats: COMPLETE Column stats: PARTIAL + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: string), c2 (type: string), c3 (type: string), c4 (type: char(50)), c5 (type: char(50)), c6 (type: char(50)), c7 (type: char(7)), c8 (type: char(7)), c9 (type: char(7)), c10 (type: varchar(50)), c11 (type: varchar(50)), c12 (type: varchar(50)), c13 (type: varchar(7)), c14 (type: varchar(7)), c15 (type: varchar(7)), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17 + Statistics: Num rows: 6 Data size: 3300 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 3300 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Execution mode: llap
+ LLAP IO: all inputs
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group
+PREHOOK: type: QUERY
+PREHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group
+PREHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group@part=1
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group
+POSTHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group@part=1
+#### A masked pattern was here ####
+insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b
+101 1 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 original
+102 1 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 original
+103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
+104 1 66475.561431 -100.35978 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 original
+105 1 9250340.75 NULL 46114.28 9250340.75 NULL 46114.28 9250340 NULL 46114.2 9250340.75 NULL 46114.28 9250340 NULL 46114.2 original
+111 1 filler filler filler filler filler filler filler filler filler filler filler filler filler filler filler new
+PREHOOK: query: drop table part_change_numeric_group_string_group_floating_string_group
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group
+PREHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group
+POSTHOOK: query: drop table part_change_numeric_group_string_group_floating_string_group
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group
+POSTHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group
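Floating-point data re-read through the string group shows two effects above: DECIMAL(38,18) values lose their zero padding ('66475.561431' instead of '66475.561431000000000000'), and CHAR(7)/VARCHAR(7) keep only 7 characters, turning 1.7976931348623157E308 into '1.79769' and Infinity into 'Infinit'. A hedged sketch of the truncating case, with the hypothetical table demo_d2v (not from the patch):

-- Hypothetical sketch, not from the patch: DOUBLE re-typed to VARCHAR(7) after data is written.
CREATE TABLE demo_d2v (insert_num int, c1 double) PARTITIONED BY (part int) STORED AS TEXTFILE;
INSERT INTO demo_d2v PARTITION (part=1) VALUES (1, 1.7976931348623157E308);
ALTER TABLE demo_d2v REPLACE COLUMNS (insert_num int, c1 VARCHAR(7));
SELECT insert_num, c1 FROM demo_d2v;   -- expect '1.79769'

+PREHOOK: query: CREATE TABLE part_change_string_group_string_group_string(insert_num int,
+ c1 string, c2 string, c3 string, c4 string,
+ c5 CHAR(50), c6 CHAR(50), c7 CHAR(50),
+ c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING) PARTITIONED BY(part INT)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@part_change_string_group_string_group_string
+POSTHOOK: query: CREATE TABLE part_change_string_group_string_group_string(insert_num int,
+ c1 string, c2 string, c3 string, c4 string,
+ c5 CHAR(50), c6 CHAR(50), c7 CHAR(50),
+ c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING) PARTITIONED BY(part INT)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default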
+POSTHOOK: Output: default@part_change_string_group_string_group_string +PREHOOK: query: insert into table part_change_string_group_string_group_string partition(part=1) SELECT insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_string_group_string_group_string@part=1 +POSTHOOK: query: insert into table part_change_string_group_string_group_string partition(part=1) SELECT insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_string_group_string_group_string@part=1 +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c10 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c5 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c6 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c7 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c8 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c9 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,b from part_change_string_group_string_group_string +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_string_group_string_group_string +PREHOOK: Input: default@part_change_string_group_string_group_string@part=1 +#### A masked pattern was here #### +POSTHOOK: query: 
select insert_num,part,c1,c2,c3,c4,b from part_change_string_group_string_group_string +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_string_group_string_group_string +POSTHOOK: Input: default@part_change_string_group_string_group_string@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 b +101 1 freckled freckled freckled freckled original +102 1 ox ox ox ox original +103 1 original +104 1 I cooked I cooked I cooked I cooked original +105 1 200 200 200 200 original +PREHOOK: query: alter table part_change_string_group_string_group_string replace columns (insert_num int, + c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9), + c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING, + c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_string_group_string_group_string +PREHOOK: Output: default@part_change_string_group_string_group_string +POSTHOOK: query: alter table part_change_string_group_string_group_string replace columns (insert_num int, + c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9), + c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING, + c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_string_group_string_group_string +POSTHOOK: Output: default@part_change_string_group_string_group_string +PREHOOK: query: insert into table part_change_string_group_string_group_string partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_change_string_group_string_group_string@part=1 +POSTHOOK: query: insert into table part_change_string_group_string_group_string partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_change_string_group_string_group_string@part=1 +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c1 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c10 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c2 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c3 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c4 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c5 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c6 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c7 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c8 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c9 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string +PREHOOK: type: QUERY +POSTHOOK: 
query: explain vectorization detail
+select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string
+POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+ enabled: false
+ enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: part_change_string_group_string_group_string
+ Statistics: Num rows: 6 Data size: 2712 Basic stats: COMPLETE Column stats: PARTIAL
+ Select Operator
+ expressions: insert_num (type: int), part (type: int), c1 (type: char(50)), c2 (type: char(9)), c3 (type: varchar(50)), c4 (type: char(9)), c5 (type: varchar(50)), c6 (type: varchar(9)), c7 (type: string), c8 (type: char(50)), c9 (type: char(9)), c10 (type: string), b (type: string)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12
+ Statistics: Num rows: 6 Data size: 2712 Basic stats: COMPLETE Column stats: PARTIAL
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 6 Data size: 2712 Basic stats: COMPLETE Column stats: PARTIAL
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Execution mode: llap
+ LLAP IO: all inputs
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string
+PREHOOK: type: QUERY
+PREHOOK: Input: default@part_change_string_group_string_group_string
+PREHOOK: Input: default@part_change_string_group_string_group_string@part=1
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@part_change_string_group_string_group_string
+POSTHOOK: Input: default@part_change_string_group_string_group_string@part=1
+#### A masked pattern was here ####
+insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b
+101 1 freckled freckled freckled freckled freckled freckled freckled freckled freckled freckled original
+102 1 ox ox ox ox ox ox ox ox ox ox original
+103 1 original
+104 1 I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked original
+105 1 200 200 200 200 200 200 200 200 200 200 original
+111 1 filler filler filler filler filler filler filler filler filler filler new
+PREHOOK: query: drop table part_change_string_group_string_group_string
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@part_change_string_group_string_group_string
+PREHOOK: Output: default@part_change_string_group_string_group_string
+POSTHOOK: query: drop table part_change_string_group_string_group_string
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@part_change_string_group_string_group_string
+POSTHOOK: Output: default@part_change_string_group_string_group_string
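Conversions inside the string group itself are lossless as long as values fit the declared length: every test string above ('freckled', 'ox', 'I cooked', '200', 'filler') is at most 9 characters, so even the CHAR(9) and VARCHAR(9) columns return them unchanged. A sketch of the over-length case, with the hypothetical table demo_s2c (not from the patch):

-- Hypothetical sketch, not from the patch: STRING re-typed to CHAR(9) after data is written.
-- String-group conversions keep the data; only over-length values are cut.
CREATE TABLE demo_s2c (insert_num int, c1 string) PARTITIONED BY (part int) STORED AS TEXTFILE;
INSERT INTO demo_s2c PARTITION (part=1) VALUES (1, 'freckled'), (2, 'I cooked rhubarb');
ALTER TABLE demo_s2c REPLACE COLUMNS (insert_num int, c1 CHAR(9));
SELECT insert_num, c1 FROM demo_s2c;   -- expect 'freckled' intact and the long value cut to 9 characters

+PREHOOK: query: CREATE TABLE part_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int,
+ c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint,
+ c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint,
+ c12 int, c13 int, c14 int, c15 int,
+ c16 bigint, c17 bigint, c18 bigint,
+ b STRING)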
PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: CREATE TABLE part_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int, + c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, + c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint, + c12 int, c13 int, c14 int, c15 int, + c16 bigint, c17 bigint, c18 bigint, + b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: query: insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +POSTHOOK: query: insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c16 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: 
part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c17 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c18 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 smallint1 smallint1 smallint1 smallint1 smallint1 int1 int1 int1 int1 bigint1 bigint1 bigint1 _c19 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 b +101 1 -128 -128 -128 -128 -128 -128 NULL NULL NULL NULL NULL -2147483648 -2147483648 -2147483648 -2147483648 NULL NULL NULL original +102 1 127 127 127 127 127 127 
32767 32767 32767 32767 32767 2147483647 2147483647 2147483647 2147483647 9223372036854775807 9223372036854775807 9223372036854775807 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 23 23 23 23 23 23 834 834 834 834 834 203332 203332 203332 203332 888888857923222 888888857923222 888888857923222 original +105 1 -99 -99 -99 -99 -99 -99 -28300 -28300 -28300 -28300 -28300 -999992 -999992 -999992 -999992 -222282153733 -222282153733 -222282153733 original +PREHOOK: query: alter table part_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: alter table part_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: query: insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +POSTHOOK: query: insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c1 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c10 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c11 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c12 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c13 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c14 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c15 SCRIPT [] +POSTHOOK: 
Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c16 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c17 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c18 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c2 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c3 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c4 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c5 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c6 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c7 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c8 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c9 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 _col17 _col18 _col19 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_lower_to_higher_numeric_group_tinyint_to_bigint + Statistics: Num rows: 6 Data size: 1060 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: smallint), c2 (type: int), c3 (type: bigint), c4 (type: decimal(38,18)), c5 (type: float), c6 (type: double), c7 (type: int), c8 (type: bigint), c9 (type: decimal(38,18)), c10 (type: float), c11 (type: double), c12 (type: bigint), c13 (type: decimal(38,18)), c14 (type: float), c15 (type: double), c16 (type: decimal(38,18)), c17 (type: float), c18 (type: double), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20 + Statistics: Num rows: 6 Data size: 1060 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 1060 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: all inputs + + Stage: Stage-0 + Fetch Operator + 
limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 b +101 1 -128 -128 -128 -128.000000000000000000 -128.0 -128.0 NULL NULL NULL NULL NULL -2147483648 -2147483648.000000000000000000 -2.14748365E9 -2.147483648E9 NULL NULL NULL original +102 1 127 127 127 127.000000000000000000 127.0 127.0 32767 32767 32767.000000000000000000 32767.0 32767.0 2147483647 2147483647.000000000000000000 2.14748365E9 2.147483647E9 9223372036854775807.000000000000000000 9.223372E18 9.223372036854776E18 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 23 23 23 23.000000000000000000 23.0 23.0 834 834 834.000000000000000000 834.0 834.0 203332 203332.000000000000000000 203332.0 203332.0 888888857923222.000000000000000000 8.8888885E14 8.88888857923222E14 original +105 1 -99 -99 -99 -99.000000000000000000 -99.0 -99.0 -28300 -28300 -28300.000000000000000000 -28300.0 -28300.0 -999992 -999992.000000000000000000 -999992.0 -999992.0 -222282153733.000000000000000000 -2.22282154E11 -2.22282153733E11 original +111 1 7000 80000 90000000 1234.567800000000000000 9876.543 789.321 80000 90000000 1234.567800000000000000 9876.543 789.321 90000000 1234.567800000000000000 9876.543 789.321 1234.567800000000000000 9876.543 789.321 new +PREHOOK: query: drop table part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: drop table part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: query: CREATE TABLE part_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: CREATE TABLE part_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: query: insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) SELECT insert_num, 
+ decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +POSTHOOK: query: insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) SELECT insert_num, + decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num decimal1 decimal1 float1 _c4 +PREHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 b +101 1 99999999999999999999.999999999999999999 99999999999999999999.999999999999999999 Infinity original +102 1 -99999999999999999999.999999999999999999 -99999999999999999999.999999999999999999 -Infinity original +103 1 NULL NULL NULL original +104 1 66475.561431000000000000 66475.561431000000000000 -100.35978 original +105 1 9250340.750000000000000000 9250340.750000000000000000 NULL original +PREHOOK: query: alter table part_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: alter table part_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: query: insert into table 
part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +POSTHOOK: query: insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c1 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c2 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c3 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_lower_to_higher_numeric_group_decimal_to_float + Statistics: Num rows: 6 Data size: 648 Basic stats: COMPLETE Column stats: PARTIAL + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: float), c2 (type: double), c3 (type: double), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + Statistics: Num rows: 6 Data size: 648 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 648 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: all inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 b +101 1 1.0E20 1.0E20 Infinity original 
+102 1 -1.0E20 -1.0E20 -Infinity original +103 1 NULL NULL NULL original +104 1 66475.56 66475.561431 -100.35978 original +105 1 9250341.0 9250340.75 NULL original +111 1 1234.5677 9876.543 1234.5678 new +PREHOOK: query: drop table part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: drop table part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float diff --git ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table_llap_io.q.out ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table_llap_io.q.out new file mode 100644 index 0000000..8b9e82c --- /dev/null +++ ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table_llap_io.q.out @@ -0,0 +1,1291 @@ +PREHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@schema_evolution_data +POSTHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@schema_evolution_data +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@schema_evolution_data +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@schema_evolution_data +PREHOOK: query: CREATE TABLE table_add_int_permute_select(insert_num int, a INT, b STRING) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table_add_int_permute_select +POSTHOOK: query: CREATE TABLE table_add_int_permute_select(insert_num int, a INT, b STRING) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table_add_int_permute_select +PREHOOK: query: insert into table table_add_int_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data 
+PREHOOK: Output: default@table_add_int_permute_select +POSTHOOK: query: insert into table table_add_int_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_add_int_permute_select +POSTHOOK: Lineage: table_add_int_permute_select.a SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_add_int_permute_select.b SIMPLE [] +POSTHOOK: Lineage: table_add_int_permute_select.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 +PREHOOK: query: alter table table_add_int_permute_select add columns(c int) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@table_add_int_permute_select +PREHOOK: Output: default@table_add_int_permute_select +POSTHOOK: query: alter table table_add_int_permute_select add columns(c int) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@table_add_int_permute_select +POSTHOOK: Output: default@table_add_int_permute_select +PREHOOK: query: insert into table table_add_int_permute_select VALUES (111, 80000, 'new', 80000) +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@table_add_int_permute_select +POSTHOOK: query: insert into table table_add_int_permute_select VALUES (111, 80000, 'new', 80000) +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@table_add_int_permute_select +POSTHOOK: Lineage: table_add_int_permute_select.a SCRIPT [] +POSTHOOK: Lineage: table_add_int_permute_select.b SCRIPT [] +POSTHOOK: Lineage: table_add_int_permute_select.c SCRIPT [] +POSTHOOK: Lineage: table_add_int_permute_select.insert_num SCRIPT [] +col1 col2 col3 col4 +PREHOOK: query: explain vectorization detail +select insert_num,a,b from table_add_int_permute_select +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,a,b from table_add_int_permute_select +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: table_add_int_permute_select + Statistics: Num rows: 6 Data size: 600 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: insert_num (type: int), a (type: int), b (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 6 Data size: 600 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 600 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: all inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,a,b from table_add_int_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@table_add_int_permute_select +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,a,b from table_add_int_permute_select +POSTHOOK: type: QUERY 
+POSTHOOK: Input: default@table_add_int_permute_select +#### A masked pattern was here #### +insert_num a b +101 -2147483648 original +102 2147483647 original +103 NULL original +104 203332 original +105 -999992 original +111 80000 new +PREHOOK: query: select insert_num,a,b,c from table_add_int_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@table_add_int_permute_select +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,a,b,c from table_add_int_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_add_int_permute_select +#### A masked pattern was here #### +insert_num a b c +101 -2147483648 original NULL +102 2147483647 original NULL +103 NULL original NULL +104 203332 original NULL +105 -999992 original NULL +111 80000 new 80000 +PREHOOK: query: select insert_num,c from table_add_int_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@table_add_int_permute_select +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c from table_add_int_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_add_int_permute_select +#### A masked pattern was here #### +insert_num c +101 NULL +102 NULL +103 NULL +104 NULL +105 NULL +111 80000 +PREHOOK: query: drop table table_add_int_permute_select +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table_add_int_permute_select +PREHOOK: Output: default@table_add_int_permute_select +POSTHOOK: query: drop table table_add_int_permute_select +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table_add_int_permute_select +POSTHOOK: Output: default@table_add_int_permute_select +PREHOOK: query: CREATE TABLE table_add_int_string_permute_select(insert_num int, a INT, b STRING) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table_add_int_string_permute_select +POSTHOOK: query: CREATE TABLE table_add_int_string_permute_select(insert_num int, a INT, b STRING) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table_add_int_string_permute_select +PREHOOK: query: insert into table table_add_int_string_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table_add_int_string_permute_select +POSTHOOK: query: insert into table table_add_int_string_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_add_int_string_permute_select +POSTHOOK: Lineage: table_add_int_string_permute_select.a SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_add_int_string_permute_select.b SIMPLE [] +POSTHOOK: Lineage: table_add_int_string_permute_select.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 +PREHOOK: query: alter table table_add_int_string_permute_select add columns(c int, d string) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@table_add_int_string_permute_select +PREHOOK: Output: default@table_add_int_string_permute_select +POSTHOOK: query: alter table table_add_int_string_permute_select add columns(c int, d string) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@table_add_int_string_permute_select +POSTHOOK: Output: default@table_add_int_string_permute_select 
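The table_add_int_permute_select case above, and the table_add_int_string_permute_select case that continues below, both exercise the same Hive behavior: ALTER TABLE ... ADD COLUMNS is a metadata-only change, so rows written before the ALTER are read back with NULL for the new columns, while rows inserted afterward carry real values. A minimal sketch of that flow, assuming an ordinary Hive session and using a hypothetical table name (demo_evol) rather than the test tables:

-- demo_evol is illustrative only; it is not part of the test suite.
CREATE TABLE demo_evol (insert_num INT, a INT, b STRING);
INSERT INTO demo_evol VALUES (101, 1, 'original');

-- Metadata-only schema evolution: existing data files are not rewritten.
ALTER TABLE demo_evol ADD COLUMNS (c INT, d STRING);
INSERT INTO demo_evol VALUES (111, 2, 'new', 80000, 'filler');

-- Pre-ALTER rows surface NULL for the added columns c and d;
-- post-ALTER rows carry real values, matching the golden output pattern.
SELECT insert_num, a, b, c, d FROM demo_evol;
-- 101  1  original  NULL   NULL
-- 111  2  new       80000  filler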
+PREHOOK: query: insert into table table_add_int_string_permute_select VALUES (111, 80000, 'new', 80000, 'filler') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@table_add_int_string_permute_select +POSTHOOK: query: insert into table table_add_int_string_permute_select VALUES (111, 80000, 'new', 80000, 'filler') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@table_add_int_string_permute_select +POSTHOOK: Lineage: table_add_int_string_permute_select.a SCRIPT [] +POSTHOOK: Lineage: table_add_int_string_permute_select.b SCRIPT [] +POSTHOOK: Lineage: table_add_int_string_permute_select.c SCRIPT [] +POSTHOOK: Lineage: table_add_int_string_permute_select.d SCRIPT [] +POSTHOOK: Lineage: table_add_int_string_permute_select.insert_num SCRIPT [] +col1 col2 col3 col4 col5 +PREHOOK: query: explain vectorization detail +select insert_num,a,b from table_add_int_string_permute_select +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,a,b from table_add_int_string_permute_select +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: table_add_int_string_permute_select + Statistics: Num rows: 6 Data size: 600 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: insert_num (type: int), a (type: int), b (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 6 Data size: 600 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 600 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: all inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,a,b from table_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@table_add_int_string_permute_select +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,a,b from table_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_add_int_string_permute_select +#### A masked pattern was here #### +insert_num a b +101 -2147483648 original +102 2147483647 original +103 NULL original +104 203332 original +105 -999992 original +111 80000 new +PREHOOK: query: select insert_num,a,b,c from table_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@table_add_int_string_permute_select +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,a,b,c from table_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_add_int_string_permute_select +#### A masked pattern was here #### +insert_num a b c +101 -2147483648 original NULL +102 2147483647 original NULL +103 NULL original NULL +104 203332 original NULL +105 -999992 original NULL +111 80000 new 80000 +PREHOOK: query: select insert_num,a,b,c,d from table_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: 
default@table_add_int_string_permute_select +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,a,b,c,d from table_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_add_int_string_permute_select +#### A masked pattern was here #### +insert_num a b c d +101 -2147483648 original NULL NULL +102 2147483647 original NULL NULL +103 NULL original NULL NULL +104 203332 original NULL NULL +105 -999992 original NULL NULL +111 80000 new 80000 filler +PREHOOK: query: select insert_num,a,c,d from table_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@table_add_int_string_permute_select +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,a,c,d from table_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_add_int_string_permute_select +#### A masked pattern was here #### +insert_num a c d +101 -2147483648 NULL NULL +102 2147483647 NULL NULL +103 NULL NULL NULL +104 203332 NULL NULL +105 -999992 NULL NULL +111 80000 80000 filler +PREHOOK: query: select insert_num,a,d from table_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@table_add_int_string_permute_select +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,a,d from table_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_add_int_string_permute_select +#### A masked pattern was here #### +insert_num a d +101 -2147483648 NULL +102 2147483647 NULL +103 NULL NULL +104 203332 NULL +105 -999992 NULL +111 80000 filler +PREHOOK: query: select insert_num,c from table_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@table_add_int_string_permute_select +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c from table_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_add_int_string_permute_select +#### A masked pattern was here #### +insert_num c +101 NULL +102 NULL +103 NULL +104 NULL +105 NULL +111 80000 +PREHOOK: query: select insert_num,d from table_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@table_add_int_string_permute_select +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,d from table_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_add_int_string_permute_select +#### A masked pattern was here #### +insert_num d +101 NULL +102 NULL +103 NULL +104 NULL +105 NULL +111 filler +PREHOOK: query: drop table table_add_int_string_permute_select +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table_add_int_string_permute_select +PREHOOK: Output: default@table_add_int_string_permute_select +POSTHOOK: query: drop table table_add_int_string_permute_select +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table_add_int_string_permute_select +POSTHOOK: Output: default@table_add_int_string_permute_select +PREHOOK: query: CREATE TABLE table_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table_change_string_group_double +POSTHOOK: query: CREATE TABLE table_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table_change_string_group_double +PREHOOK: query: insert into table table_change_string_group_double 
SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table_change_string_group_double +POSTHOOK: query: insert into table table_change_string_group_double SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_change_string_group_double +POSTHOOK: Lineage: table_change_string_group_double.b SIMPLE [] +POSTHOOK: Lineage: table_change_string_group_double.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_double.c2 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_double.c3 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_double.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +_col0 _col1 _col2 _col3 _col4 +PREHOOK: query: alter table table_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@table_change_string_group_double +PREHOOK: Output: default@table_change_string_group_double +POSTHOOK: query: alter table table_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@table_change_string_group_double +POSTHOOK: Output: default@table_change_string_group_double +PREHOOK: query: insert into table table_change_string_group_double VALUES (111, 789.321, 789.321, 789.321, 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@table_change_string_group_double +POSTHOOK: query: insert into table table_change_string_group_double VALUES (111, 789.321, 789.321, 789.321, 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@table_change_string_group_double +POSTHOOK: Lineage: table_change_string_group_double.b SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_double.c1 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_double.c2 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_double.c3 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_double.insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 +PREHOOK: query: explain vectorization detail +select insert_num,c1,c2,c3,b from table_change_string_group_double +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,c1,c2,c3,b from table_change_string_group_double +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: table_change_string_group_double + Statistics: Num rows: 6 Data size: 720 Basic stats: COMPLETE Column stats: PARTIAL + Select Operator + expressions: insert_num (type: int), c1 (type: double), 
c2 (type: double), c3 (type: double), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Statistics: Num rows: 6 Data size: 720 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 720 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: all inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_double +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_string_group_double +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_double +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_string_group_double +#### A masked pattern was here #### +insert_num c1 c2 c3 b +101 1.7976931348623157E308 1.7976931348623157E308 1.7976931348623157E308 original +102 -1.7976931348623157E308 -1.7976931348623157E308 -1.7976931348623157E308 original +103 NULL NULL NULL original +104 30.774 30.774 30.774 original +105 46114.28 46114.28 46114.28 original +111 789.321 789.321 789.321 new +PREHOOK: query: drop table table_change_string_group_double +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table_change_string_group_double +PREHOOK: Output: default@table_change_string_group_double +POSTHOOK: query: drop table table_change_string_group_double +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table_change_string_group_double +POSTHOOK: Output: default@table_change_string_group_double +PREHOOK: query: CREATE TABLE table_change_date_group_string_group_date_group(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table_change_date_group_string_group_date_group +POSTHOOK: query: CREATE TABLE table_change_date_group_string_group_date_group(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table_change_date_group_string_group_date_group +PREHOOK: query: insert into table table_change_date_group_string_group_date_group SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table_change_date_group_string_group_date_group +POSTHOOK: query: insert into table table_change_date_group_string_group_date_group SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_change_date_group_string_group_date_group +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.b SIMPLE [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: 
Lineage: table_change_date_group_string_group_date_group.c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num date1 date1 date1 date1 date1 timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 _c11 +PREHOOK: query: alter table table_change_date_group_string_group_date_group replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@table_change_date_group_string_group_date_group +PREHOOK: Output: default@table_change_date_group_string_group_date_group +POSTHOOK: query: alter table table_change_date_group_string_group_date_group replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@table_change_date_group_string_group_date_group +POSTHOOK: Output: default@table_change_date_group_string_group_date_group +PREHOOK: query: insert into table table_change_date_group_string_group_date_group VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@table_change_date_group_string_group_date_group +POSTHOOK: query: insert into table table_change_date_group_string_group_date_group VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@table_change_date_group_string_group_date_group +POSTHOOK: Lineage: 
table_change_date_group_string_group_date_group.b SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c1 SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c10 SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c2 SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c3 SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c4 SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c5 SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c6 SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c7 SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c8 SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c9 SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 +PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_date_group_string_group_date_group +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_date_group_string_group_date_group +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_date_group_string_group_date_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_date_group_string_group_date_group +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b +101 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.970117179 6229-06-28 02:5 6229-06-28 02:54:28.970117179 6229-06-28 02:5 original +102 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:3 5966-07-09 03:30:50.597 5966-07-09 03:3 original +103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-02 06:34:14.0 1978-08-02 06:34:14.0 1978-08-02 06:3 1978-08-02 06:34:14.0 1978-08-02 06:3 original +105 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036854 1991-01-06 16:2 1991-01-06 16:20:39.72036854 1991-01-06 16:2 original +111 filler filler filler filler filler filler filler filler filler filler new +PREHOOK: query: drop table table_change_date_group_string_group_date_group +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table_change_date_group_string_group_date_group +PREHOOK: Output: default@table_change_date_group_string_group_date_group +POSTHOOK: query: drop table table_change_date_group_string_group_date_group +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table_change_date_group_string_group_date_group +POSTHOOK: Output: default@table_change_date_group_string_group_date_group +PREHOOK: query: CREATE TABLE table_change_numeric_group_string_group_multi_ints_string_group(insert_num int, + c1 tinyint, c2 smallint, c3 int, c4 bigint, + c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint, + c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint, + b STRING) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: query: CREATE TABLE 
table_change_numeric_group_string_group_multi_ints_string_group(insert_num int, + c1 tinyint, c2 smallint, c3 int, c4 bigint, + c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint, + c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint, + b STRING) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: query: insert into table table_change_numeric_group_string_group_multi_ints_string_group SELECT insert_num, + tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: query: insert into table table_change_numeric_group_string_group_multi_ints_string_group SELECT insert_num, + tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.b SIMPLE [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c16 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c17 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c18 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, 
comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c19 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c20 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 _c21 +PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 b +101 -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL original +102 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 original +103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL 
NULL NULL original +104 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 203332 888888857923222 original +105 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 original +PREHOOK: query: alter table table_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, c4 STRING, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5), + c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5), + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: query: alter table table_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, c4 STRING, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5), + c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5), + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: query: insert into table table_change_numeric_group_string_group_multi_ints_string_group VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: query: insert into table table_change_numeric_group_string_group_multi_ints_string_group VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.b SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c1 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c10 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c11 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c12 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c13 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c14 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c15 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c16 SCRIPT [] +POSTHOOK: Lineage: 
table_change_numeric_group_string_group_multi_ints_string_group.c17 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c18 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c19 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c2 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c20 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c3 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c4 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c5 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c6 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c7 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c8 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c9 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 _col17 _col18 _col19 _col20 _col21 +PREHOOK: query: explain vectorization detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: table_change_numeric_group_string_group_multi_ints_string_group + Statistics: Num rows: 6 Data size: 15696 Basic stats: COMPLETE Column stats: PARTIAL + Select Operator + expressions: insert_num (type: int), c1 (type: string), c2 (type: string), c3 (type: string), c4 (type: string), c5 (type: char(50)), c6 (type: char(50)), c7 (type: char(50)), c8 (type: char(50)), c9 (type: char(5)), c10 (type: char(5)), c11 (type: char(5)), c12 (type: char(5)), c13 (type: varchar(50)), c14 (type: varchar(50)), c15 (type: varchar(50)), c16 (type: varchar(50)), c17 (type: varchar(5)), c18 (type: varchar(5)), c19 (type: varchar(5)), c20 (type: varchar(5)), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21 + Statistics: Num rows: 6 Data size: 15696 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 15696 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: all inputs + + Stage: Stage-0 + Fetch 
Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 b +101 -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147 NULL -128 NULL -2147483648 NULL -128 NULL -2147 NULL original +102 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 21474 92233 127 32767 2147483647 9223372036854775807 127 32767 21474 92233 original +103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 20333 88888 23 834 203332 888888857923222 23 834 20333 88888 original +105 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -2830 -9999 -2222 -99 -28300 -999992 -222282153733 -99 -2830 -9999 -2222 original +111 filler filler filler filler filler filler filler filler fille fille fille fille filler filler filler filler fille fille fille fille new +PREHOOK: query: drop table table_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: query: drop table table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: query: CREATE TABLE table_change_numeric_group_string_group_floating_string_group(insert_num int, + c1 decimal(38,18), c2 float, c3 double, + c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double, + c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double, + b STRING) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +POSTHOOK: query: CREATE TABLE table_change_numeric_group_string_group_floating_string_group(insert_num int, + c1 decimal(38,18), c2 float, c3 double, + c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double, + c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double, + b STRING) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +PREHOOK: query: insert into table table_change_numeric_group_string_group_floating_string_group SELECT insert_num, + decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY 
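The golden output above demonstrates Hive's numeric-to-string-group schema evolution: when an int column is re-declared as CHAR(5) or VARCHAR(5) via REPLACE COLUMNS, old rows are converted on read and the rendered value is truncated to the declared length (e.g. -2147483648 survives only as "-2147"). A minimal standalone sketch of that behavior follows; it assumes an ORC table and a metastore that permits incompatible column type changes (hive.metastore.disallow.incompatible.col.type.changes=false), and the table name demo_int_to_char is illustrative, not part of the test suite.

-- Sketch only: demo_int_to_char is a hypothetical table, not from the tests.
CREATE TABLE demo_int_to_char (insert_num int, c1 int) STORED AS ORC;
INSERT INTO demo_int_to_char VALUES (101, -2147483648);
-- Reinterpret the stored int column as CHAR(5) without rewriting data files.
ALTER TABLE demo_int_to_char REPLACE COLUMNS (insert_num int, c1 CHAR(5));
-- Old rows are converted on read; the rendered value is truncated to the
-- declared length, so -2147483648 comes back as '-2147'.
SELECT insert_num, c1 FROM demo_int_to_char;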
+PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +POSTHOOK: query: insert into table table_change_numeric_group_string_group_floating_string_group SELECT insert_num, + decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.b SIMPLE [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c9 SIMPLE 
[(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num decimal1 float1 double1 decimal1 float1 double1 decimal1 float1 double1 decimal1 float1 double1 decimal1 float1 double1 _c16 +PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b +101 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 original +102 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 original +103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 original +105 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 original +PREHOOK: query: alter table table_change_numeric_group_string_group_floating_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, + c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7), + c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7), + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group +PREHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +POSTHOOK: query: alter table table_change_numeric_group_string_group_floating_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, + c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7), + c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7), + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group +POSTHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +PREHOOK: 
query: insert into table table_change_numeric_group_string_group_floating_string_group VALUES (111, + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +POSTHOOK: query: insert into table table_change_numeric_group_string_group_floating_string_group VALUES (111, + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.b SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c1 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c10 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c11 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c12 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c13 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c14 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c15 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c2 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c3 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c4 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c5 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c6 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c7 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c8 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c9 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 +PREHOOK: query: explain vectorization detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: table_change_numeric_group_string_group_floating_string_group + Statistics: Num rows: 6 Data size: 10884 Basic stats: COMPLETE Column stats: PARTIAL + Select Operator + expressions: insert_num (type: 
int), c1 (type: string), c2 (type: string), c3 (type: string), c4 (type: char(50)), c5 (type: char(50)), c6 (type: char(50)), c7 (type: char(7)), c8 (type: char(7)), c9 (type: char(7)), c10 (type: varchar(50)), c11 (type: varchar(50)), c12 (type: varchar(50)), c13 (type: varchar(7)), c14 (type: varchar(7)), c15 (type: varchar(7)), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16 + Statistics: Num rows: 6 Data size: 10884 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 10884 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: all inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b +101 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 original +102 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 original +103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 66475.561431 -100.35978 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 original +105 9250340.75 NULL 46114.28 9250340.75 NULL 46114.28 9250340 NULL 46114.2 9250340.75 NULL 46114.28 9250340 NULL 46114.2 original +111 filler filler filler filler filler filler filler filler filler filler filler filler filler filler filler new +PREHOOK: query: drop table table_change_numeric_group_string_group_floating_string_group +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group +PREHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +POSTHOOK: query: drop table table_change_numeric_group_string_group_floating_string_group +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group +POSTHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +PREHOOK: query: CREATE TABLE table_change_string_group_string_group_string(insert_num int, + c1 string, c2 string, c3 string, c4 string, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), + c8 VARCHAR(50), c9 
VARCHAR(50), c10 VARCHAR(50), b STRING) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table_change_string_group_string_group_string +POSTHOOK: query: CREATE TABLE table_change_string_group_string_group_string(insert_num int, + c1 string, c2 string, c3 string, c4 string, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), + c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table_change_string_group_string_group_string +PREHOOK: query: insert into table table_change_string_group_string_group_string SELECT insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table_change_string_group_string_group_string +POSTHOOK: query: insert into table table_change_string_group_string_group_string SELECT insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_change_string_group_string_group_string +POSTHOOK: Lineage: table_change_string_group_string_group_string.b SIMPLE [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c10 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c5 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c6 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c7 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c8 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c9 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 +PREHOOK: query: select insert_num,c1,c2,c3,c4,b from 
table_change_string_group_string_group_string +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_string_group_string_group_string +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_string_group_string_group_string +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_string_group_string_group_string +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 b +101 freckled freckled freckled freckled original +102 ox ox ox ox original +103 original +104 I cooked I cooked I cooked I cooked original +105 200 200 200 200 original +PREHOOK: query: alter table table_change_string_group_string_group_string replace columns (insert_num int, + c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9), + c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING, + c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@table_change_string_group_string_group_string +PREHOOK: Output: default@table_change_string_group_string_group_string +POSTHOOK: query: alter table table_change_string_group_string_group_string replace columns (insert_num int, + c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9), + c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING, + c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@table_change_string_group_string_group_string +POSTHOOK: Output: default@table_change_string_group_string_group_string +PREHOOK: query: insert into table table_change_string_group_string_group_string VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@table_change_string_group_string_group_string +POSTHOOK: query: insert into table table_change_string_group_string_group_string VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@table_change_string_group_string_group_string +POSTHOOK: Lineage: table_change_string_group_string_group_string.b SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c1 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c10 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c2 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c3 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c4 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c5 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c6 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c7 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c8 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c9 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 +PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_string_group_string_group_string +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_string_group_string_group_string +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from 
table_change_string_group_string_group_string +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_string_group_string_group_string +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b +101 freckled freckled freckled freckled freckled freckled freckled freckled freckled freckled original +102 ox ox ox ox ox ox ox ox ox ox original +103 original +104 I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked original +105 200 200 200 200 200 200 200 200 200 200 original +111 filler filler filler filler filler filler filler filler filler filler new +PREHOOK: query: drop table table_change_string_group_string_group_string +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table_change_string_group_string_group_string +PREHOOK: Output: default@table_change_string_group_string_group_string +POSTHOOK: query: drop table table_change_string_group_string_group_string +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table_change_string_group_string_group_string +POSTHOOK: Output: default@table_change_string_group_string_group_string +PREHOOK: query: CREATE TABLE table_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int, + c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, + c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint, + c12 int, c13 int, c14 int, c15 int, + c16 bigint, c17 bigint, c18 bigint, + b STRING) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: CREATE TABLE table_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int, + c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, + c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint, + c12 int, c13 int, c14 int, c15 int, + c16 bigint, c17 bigint, c18 bigint, + b STRING) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: query: insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.b SIMPLE [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] 
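The table being populated here exercises the lower-to-higher numeric widening group (tinyint/smallint/int widened to larger integer and floating types), with the post-alter results shown further below. A minimal sketch of the same conversion, under the same assumptions as the earlier sketch (ORC storage; demo_widen and c1 are illustrative names):

-- Sketch only: demo_widen is a hypothetical table, not from the tests.
CREATE TABLE demo_widen (insert_num int, c1 tinyint) STORED AS ORC;
INSERT INTO demo_widen VALUES (102, 127);
-- Widening tinyint -> bigint is a lossless schema evolution step.
ALTER TABLE demo_widen REPLACE COLUMNS (insert_num int, c1 BIGINT);
-- Existing rows are promoted on read: 127 is returned unchanged as a bigint,
-- matching the post-alter rows shown further below in the golden output.
SELECT insert_num, c1 FROM demo_widen;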
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c16 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c17 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c18 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 smallint1 smallint1 smallint1 smallint1 smallint1 int1 int1 int1 int1 bigint1 bigint1 bigint1 _c19 +PREHOOK: query: select 
insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 b +101 -128 -128 -128 -128 -128 -128 NULL NULL NULL NULL NULL -2147483648 -2147483648 -2147483648 -2147483648 NULL NULL NULL original +102 127 127 127 127 127 127 32767 32767 32767 32767 32767 2147483647 2147483647 2147483647 2147483647 9223372036854775807 9223372036854775807 9223372036854775807 original +103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 23 23 23 23 23 23 834 834 834 834 834 203332 203332 203332 203332 888888857923222 888888857923222 888888857923222 original +105 -99 -99 -99 -99 -99 -99 -28300 -28300 -28300 -28300 -28300 -999992 -999992 -999992 -999992 -222282153733 -222282153733 -222282153733 original +PREHOOK: query: alter table table_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: alter table table_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: query: insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.b SCRIPT [] +POSTHOOK: 
Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c1 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c10 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c11 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c12 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c13 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c14 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c15 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c16 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c17 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c18 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c2 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c3 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c4 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c5 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c6 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c7 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c8 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c9 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 _col17 _col18 _col19 +PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 b +101 -128 -128 -128 -128.000000000000000000 -128.0 -128.0 NULL NULL NULL NULL NULL -2147483648 -2147483648.000000000000000000 -2.14748365E9 -2.147483648E9 NULL NULL NULL original +102 127 127 127 127.000000000000000000 127.0 127.0 32767 32767 32767.000000000000000000 32767.0 32767.0 2147483647 2147483647.000000000000000000 2.14748365E9 2.147483647E9 9223372036854775807.000000000000000000 9.223372E18 9.223372036854776E18 original +103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 23 23 23 23.000000000000000000 23.0 23.0 834 834 834.000000000000000000 834.0 834.0 203332 203332.000000000000000000 203332.0 203332.0 888888857923222.000000000000000000 8.8888885E14 8.88888857923222E14 original +105 -99 -99 -99 -99.000000000000000000 -99.0 -99.0 -28300 -28300 -28300.000000000000000000 -28300.0 -28300.0 -999992 -999992.000000000000000000 -999992.0 -999992.0 
-222282153733.000000000000000000 -2.22282154E11 -2.22282153733E11 original +111 7000 80000 90000000 1234.567800000000000000 9876.543 789.321 80000 90000000 1234.567800000000000000 9876.543 789.321 90000000 1234.567800000000000000 9876.543 789.321 1234.567800000000000000 9876.543 789.321 new +PREHOOK: query: drop table table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: drop table table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: query: CREATE TABLE table_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: CREATE TABLE table_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: query: insert into table table_change_lower_to_higher_numeric_group_decimal_to_float SELECT insert_num, + decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: insert into table table_change_lower_to_higher_numeric_group_decimal_to_float SELECT insert_num, + decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.b SIMPLE [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num decimal1 decimal1 float1 _c4 +PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: QUERY +POSTHOOK: Input: 
default@table_change_lower_to_higher_numeric_group_decimal_to_float +#### A masked pattern was here #### +insert_num c1 c2 c3 b +101 99999999999999999999.999999999999999999 99999999999999999999.999999999999999999 Infinity original +102 -99999999999999999999.999999999999999999 -99999999999999999999.999999999999999999 -Infinity original +103 NULL NULL NULL original +104 66475.561431000000000000 66475.561431000000000000 -100.35978 original +105 9250340.750000000000000000 9250340.750000000000000000 NULL original +PREHOOK: query: alter table table_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: alter table table_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: query: insert into table table_change_lower_to_higher_numeric_group_decimal_to_float VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: insert into table table_change_lower_to_higher_numeric_group_decimal_to_float VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.b SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c1 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c2 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c3 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 +PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +#### A masked pattern was here #### +insert_num c1 c2 c3 b +101 1.0E20 1.0E20 Infinity original +102 -1.0E20 -1.0E20 -Infinity original +103 NULL NULL NULL original +104 66475.56 66475.561431 -100.35978 original +105 9250341.0 9250340.75 NULL original +111 1234.5677 9876.543 1234.5678 new +PREHOOK: query: drop table table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: drop table table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: DROPTABLE +POSTHOOK: 
Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float diff --git ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part.q.out ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part.q.out index 4f8b05e..8322fc5 100644 --- ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part.q.out +++ ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part.q.out @@ -109,9 +109,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 4 @@ -268,9 +269,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 5 @@ -481,9 +483,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 5 @@ -633,9 +636,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 @@ -854,9 +858,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 22 @@ -1065,9 +1070,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 17 @@ -1262,9 +1268,10 @@ STAGE 
PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 @@ -1487,9 +1494,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 20 @@ -1654,9 +1662,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 5 diff --git ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex.q.out ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex.q.out index f2cf4f9..a0f7010 100644 --- ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex.q.out +++ ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex.q.out @@ -183,9 +183,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -487,9 +488,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -717,9 +719,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 diff --git ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex_llap_io.q.out ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex_llap_io.q.out new file mode 100644 index 0000000..a8e893a --- /dev/null +++ ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex_llap_io.q.out @@ -0,0 +1,768 @@ +PREHOOK: query: CREATE TABLE part_change_various_various_struct1(insert_num int, s1 STRUCT, b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_various_various_struct1 +POSTHOOK: query: CREATE TABLE part_change_various_various_struct1(insert_num int, s1 STRUCT, b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_various_various_struct1 +PREHOOK: query: CREATE TABLE complex_struct1_a_txt(insert_num int, s1 STRUCT, b STRING) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@complex_struct1_a_txt +POSTHOOK: query: CREATE TABLE complex_struct1_a_txt(insert_num int, s1 STRUCT, b STRING) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@complex_struct1_a_txt +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct1_a.txt' overwrite into table complex_struct1_a_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@complex_struct1_a_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct1_a.txt' overwrite into table complex_struct1_a_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@complex_struct1_a_txt +PREHOOK: query: insert into table part_change_various_various_struct1 partition(part=1) select * from complex_struct1_a_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@complex_struct1_a_txt +PREHOOK: Output: default@part_change_various_various_struct1@part=1 +POSTHOOK: query: insert into table part_change_various_various_struct1 partition(part=1) select * from complex_struct1_a_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@complex_struct1_a_txt +POSTHOOK: Output: default@part_change_various_various_struct1@part=1 +POSTHOOK: Lineage: part_change_various_various_struct1 PARTITION(part=1).b SIMPLE [(complex_struct1_a_txt)complex_struct1_a_txt.FieldSchema(name:b, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_struct1 PARTITION(part=1).insert_num SIMPLE [(complex_struct1_a_txt)complex_struct1_a_txt.FieldSchema(name:insert_num, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_struct1 PARTITION(part=1).s1 SIMPLE [(complex_struct1_a_txt)complex_struct1_a_txt.FieldSchema(name:s1, type:struct, comment:null), ] +complex_struct1_a_txt.insert_num complex_struct1_a_txt.s1 complex_struct1_a_txt.b +PREHOOK: query: select insert_num,part,s1,b from part_change_various_various_struct1 +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_various_various_struct1 +PREHOOK: Input: default@part_change_various_various_struct1@part=1 +#### A 
masked pattern was here #### +POSTHOOK: query: select insert_num,part,s1,b from part_change_various_various_struct1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_various_various_struct1 +POSTHOOK: Input: default@part_change_various_various_struct1@part=1 +#### A masked pattern was here #### +insert_num part s1 b +1 1 {"c1":true,"c2":null,"c3":null,"c4":3244222,"c5":-99999999999,"c6":-29.0764,"c7":4.70614135E8,"c8":470614135,"c9":"dynamic reptile ","c10":"dynamic reptile ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":n)گ} original +2 1 {"c1":null,"c2":100,"c3":null,"c4":14,"c5":-23866739993,"c6":-3651.672,"c7":46114.284799488,"c8":46114.284799488,"c9":" baffling ","c10":" baffling ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":n)گ} original +3 1 {"c1":false,"c2":72,"c3":null,"c4":-93222,"c5":30,"c6":-66475.56,"c7":-66475.561431,"c8":0.561431,"c9":"1 ","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":n)گ} original +4 1 {"c1":null,"c2":-90,"c3":null,"c4":3289094,"c5":46114,"c6":9250341.0,"c7":9250340.75,"c8":9250340.75,"c9":"junkyard ","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":n)گ} original +PREHOOK: query: alter table part_change_various_various_struct1 replace columns (insert_num int, s1 STRUCT, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_various_various_struct1 +PREHOOK: Output: default@part_change_various_various_struct1 +POSTHOOK: query: alter table part_change_various_various_struct1 replace columns (insert_num int, s1 STRUCT, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_various_various_struct1 +POSTHOOK: Output: default@part_change_various_various_struct1 +PREHOOK: query: CREATE TABLE complex_struct1_b_txt(insert_num int, s1 STRUCT, b STRING) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@complex_struct1_b_txt +POSTHOOK: query: CREATE TABLE complex_struct1_b_txt(insert_num int, s1 STRUCT, b STRING) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@complex_struct1_b_txt +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct1_b.txt' overwrite into table complex_struct1_b_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@complex_struct1_b_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct1_b.txt' overwrite into table complex_struct1_b_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@complex_struct1_b_txt +PREHOOK: query: insert into table part_change_various_various_struct1 partition(part=2) select * from complex_struct1_b_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@complex_struct1_b_txt +PREHOOK: Output: default@part_change_various_various_struct1@part=2 +POSTHOOK: query: insert into table part_change_various_various_struct1 partition(part=2) select * from complex_struct1_b_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@complex_struct1_b_txt +POSTHOOK: Output: default@part_change_various_various_struct1@part=2 +POSTHOOK: Lineage: part_change_various_various_struct1 PARTITION(part=2).b 
SIMPLE [(complex_struct1_b_txt)complex_struct1_b_txt.FieldSchema(name:b, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_struct1 PARTITION(part=2).insert_num SIMPLE [(complex_struct1_b_txt)complex_struct1_b_txt.FieldSchema(name:insert_num, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_struct1 PARTITION(part=2).s1 SIMPLE [(complex_struct1_b_txt)complex_struct1_b_txt.FieldSchema(name:s1, type:struct, comment:null), ] +complex_struct1_b_txt.insert_num complex_struct1_b_txt.s1 complex_struct1_b_txt.b +PREHOOK: query: CREATE TABLE complex_struct1_c_txt(insert_num int, s1 STRUCT, b STRING) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@complex_struct1_c_txt +POSTHOOK: query: CREATE TABLE complex_struct1_c_txt(insert_num int, s1 STRUCT, b STRING) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@complex_struct1_c_txt +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct1_c.txt' overwrite into table complex_struct1_c_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@complex_struct1_c_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct1_c.txt' overwrite into table complex_struct1_c_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@complex_struct1_c_txt +PREHOOK: query: insert into table part_change_various_various_struct1 partition(part=1) select * from complex_struct1_c_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@complex_struct1_c_txt +PREHOOK: Output: default@part_change_various_various_struct1@part=1 +POSTHOOK: query: insert into table part_change_various_various_struct1 partition(part=1) select * from complex_struct1_c_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@complex_struct1_c_txt +POSTHOOK: Output: default@part_change_various_various_struct1@part=1 +POSTHOOK: Lineage: part_change_various_various_struct1 PARTITION(part=1).b SIMPLE [(complex_struct1_c_txt)complex_struct1_c_txt.FieldSchema(name:b, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_struct1 PARTITION(part=1).insert_num SIMPLE [(complex_struct1_c_txt)complex_struct1_c_txt.FieldSchema(name:insert_num, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_struct1 PARTITION(part=1).s1 SIMPLE [(complex_struct1_c_txt)complex_struct1_c_txt.FieldSchema(name:s1, type:struct, comment:null), ] +complex_struct1_c_txt.insert_num complex_struct1_c_txt.s1 complex_struct1_c_txt.b +PREHOOK: query: explain vectorization detail +select insert_num,part,s1,b from part_change_various_various_struct1 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,s1,b from part_change_various_various_struct1 +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_various_various_struct1 + Statistics: 
Num rows: 6 Data size: 21030 Basic stats: COMPLETE Column stats: PARTIAL + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:s1:struct, 2:b:string, 3:part:int, 4:ROW__ID:struct] + Select Operator + expressions: insert_num (type: int), part (type: int), s1 (type: struct), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 3, 1, 2] + Statistics: Num rows: 6 Data size: 16320 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 6 Data size: 16320 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [0, 1, 2] + dataColumns: insert_num:int, s1:struct, b:string + partitionColumnCount: 1 + partitionColumns: part:int + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,s1,b from part_change_various_various_struct1 +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_various_various_struct1 +PREHOOK: Input: default@part_change_various_various_struct1@part=1 +PREHOOK: Input: default@part_change_various_various_struct1@part=2 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,s1,b from part_change_various_various_struct1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_various_various_struct1 +POSTHOOK: Input: default@part_change_various_various_struct1@part=1 +POSTHOOK: Input: default@part_change_various_various_struct1@part=2 +#### A masked pattern was here #### +insert_num part s1 b +1 1 {"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"} original +2 1 {"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":" baffling","c10":" baffling ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":"6e 29 da af"} original +3 1 {"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"} original +4 1 {"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"} original +5 2 
{"c1":"true","c2":"400","c3":"44388","c4":"-100","c5":"953967041.","c6":"62.079153","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":"timestamp","c12":"date","c13":"binary"} new +6 1 {"c1":"false","c2":"-67","c3":"833","c4":"63993","c5":"1255178165.77663","c6":"905070.974","c7":"-4314.7918","c8":"-1240033819","c9":"trial","c10":"trial","c11":"2016-03-0703:02:22.0","c12":"2016-03-07","c13":"binary"} new +PREHOOK: query: drop table part_change_various_various_struct1 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_various_various_struct1 +PREHOOK: Output: default@part_change_various_various_struct1 +POSTHOOK: query: drop table part_change_various_various_struct1 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_various_various_struct1 +POSTHOOK: Output: default@part_change_various_various_struct1 +PREHOOK: query: CREATE TABLE part_add_various_various_struct2(insert_num int, b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_add_various_various_struct2 +POSTHOOK: query: CREATE TABLE part_add_various_various_struct2(insert_num int, b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_add_various_various_struct2 +PREHOOK: query: insert into table part_add_various_various_struct2 partition(part=1) + values(1, 'original'), + (2, 'original') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_add_various_various_struct2@part=1 +POSTHOOK: query: insert into table part_add_various_various_struct2 partition(part=1) + values(1, 'original'), + (2, 'original') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_add_various_various_struct2@part=1 +POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=1).insert_num SCRIPT [] +col1 col2 +PREHOOK: query: select insert_num,part,b from part_add_various_various_struct2 +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_various_various_struct2 +PREHOOK: Input: default@part_add_various_various_struct2@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,b from part_add_various_various_struct2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_various_various_struct2 +POSTHOOK: Input: default@part_add_various_various_struct2@part=1 +#### A masked pattern was here #### +insert_num part b +1 1 original +2 1 original +PREHOOK: query: alter table part_add_various_various_struct2 ADD columns (s2 STRUCT) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@part_add_various_various_struct2 +PREHOOK: Output: default@part_add_various_various_struct2 +POSTHOOK: query: alter table part_add_various_various_struct2 ADD columns (s2 STRUCT) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@part_add_various_various_struct2 +POSTHOOK: Output: default@part_add_various_various_struct2 +PREHOOK: query: CREATE TABLE complex_struct2_a_txt(insert_num int, b STRING, s2 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@complex_struct2_a_txt +POSTHOOK: query: CREATE TABLE complex_struct2_a_txt(insert_num int, b STRING, s2 STRUCT) +row format delimited fields terminated 
by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@complex_struct2_a_txt +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct2_a.txt' overwrite into table complex_struct2_a_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@complex_struct2_a_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct2_a.txt' overwrite into table complex_struct2_a_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@complex_struct2_a_txt +PREHOOK: query: insert into table part_add_various_various_struct2 partition(part=1) select * from complex_struct2_a_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@complex_struct2_a_txt +PREHOOK: Output: default@part_add_various_various_struct2@part=1 +POSTHOOK: query: insert into table part_add_various_various_struct2 partition(part=1) select * from complex_struct2_a_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@complex_struct2_a_txt +POSTHOOK: Output: default@part_add_various_various_struct2@part=1 +POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=1).b SIMPLE [(complex_struct2_a_txt)complex_struct2_a_txt.FieldSchema(name:b, type:string, comment:null), ] +POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=1).insert_num SIMPLE [(complex_struct2_a_txt)complex_struct2_a_txt.FieldSchema(name:insert_num, type:int, comment:null), ] +POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=1).s2 SIMPLE [(complex_struct2_a_txt)complex_struct2_a_txt.FieldSchema(name:s2, type:struct, comment:null), ] +complex_struct2_a_txt.insert_num complex_struct2_a_txt.b complex_struct2_a_txt.s2 +PREHOOK: query: CREATE TABLE complex_struct2_b_txt(insert_num int, b STRING, s2 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@complex_struct2_b_txt +POSTHOOK: query: CREATE TABLE complex_struct2_b_txt(insert_num int, b STRING, s2 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@complex_struct2_b_txt +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct2_b.txt' overwrite into table complex_struct2_b_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@complex_struct2_b_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct2_b.txt' overwrite into table complex_struct2_b_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@complex_struct2_b_txt +PREHOOK: query: insert into table part_add_various_various_struct2 partition(part=2) select * from complex_struct2_b_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@complex_struct2_b_txt +PREHOOK: Output: default@part_add_various_various_struct2@part=2 +POSTHOOK: query: insert into table part_add_various_various_struct2 partition(part=2) select * from complex_struct2_b_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@complex_struct2_b_txt +POSTHOOK: Output: default@part_add_various_various_struct2@part=2 +POSTHOOK: Lineage: 
part_add_various_various_struct2 PARTITION(part=2).b SIMPLE [(complex_struct2_b_txt)complex_struct2_b_txt.FieldSchema(name:b, type:string, comment:null), ] +POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=2).insert_num SIMPLE [(complex_struct2_b_txt)complex_struct2_b_txt.FieldSchema(name:insert_num, type:int, comment:null), ] +POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=2).s2 SIMPLE [(complex_struct2_b_txt)complex_struct2_b_txt.FieldSchema(name:s2, type:struct, comment:null), ] +complex_struct2_b_txt.insert_num complex_struct2_b_txt.b complex_struct2_b_txt.s2 +PREHOOK: query: select insert_num,part,b,s2 from part_add_various_various_struct2 +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_various_various_struct2 +PREHOOK: Input: default@part_add_various_various_struct2@part=1 +PREHOOK: Input: default@part_add_various_various_struct2@part=2 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,b,s2 from part_add_various_various_struct2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_various_various_struct2 +POSTHOOK: Input: default@part_add_various_various_struct2@part=1 +POSTHOOK: Input: default@part_add_various_various_struct2@part=2 +#### A masked pattern was here #### +insert_num part b s2 +1 1 original NULL +2 1 original NULL +3 1 new {"c1":true,"c2":null,"c3":null,"c4":3244222,"c5":-99999999999,"c6":-29.0764,"c7":4.70614135E8,"c8":470614135,"c9":"dynamic reptile ","c10":"dynamic reptile ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":n)گ} +4 1 new {"c1":null,"c2":100,"c3":null,"c4":14,"c5":-23866739993,"c6":-3651.672,"c7":46114.284799488,"c8":46114.284799488,"c9":" baffling ","c10":" baffling ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":n)گ} +5 2 new {"c1":false,"c2":72,"c3":null,"c4":-93222,"c5":30,"c6":-66475.56,"c7":-66475.561431,"c8":0.561431,"c9":"1 ","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":n)گ} +6 2 new {"c1":null,"c2":-90,"c3":null,"c4":3289094,"c5":46114,"c6":9250341.0,"c7":9250340.75,"c8":9250340.75,"c9":"junkyard ","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":n)گ} +PREHOOK: query: alter table part_add_various_various_struct2 REPLACE columns (insert_num int, b STRING, s2 STRUCT) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_add_various_various_struct2 +PREHOOK: Output: default@part_add_various_various_struct2 +POSTHOOK: query: alter table part_add_various_various_struct2 REPLACE columns (insert_num int, b STRING, s2 STRUCT) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_add_various_various_struct2 +POSTHOOK: Output: default@part_add_various_various_struct2 +PREHOOK: query: CREATE TABLE complex_struct2_c_txt(insert_num int, b STRING, s2 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@complex_struct2_c_txt +POSTHOOK: query: CREATE TABLE complex_struct2_c_txt(insert_num int, b STRING, s2 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@complex_struct2_c_txt +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct2_c.txt' overwrite into table complex_struct2_c_txt 
+PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@complex_struct2_c_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct2_c.txt' overwrite into table complex_struct2_c_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@complex_struct2_c_txt +PREHOOK: query: insert into table part_add_various_various_struct2 partition(part=2) select * from complex_struct2_c_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@complex_struct2_c_txt +PREHOOK: Output: default@part_add_various_various_struct2@part=2 +POSTHOOK: query: insert into table part_add_various_various_struct2 partition(part=2) select * from complex_struct2_c_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@complex_struct2_c_txt +POSTHOOK: Output: default@part_add_various_various_struct2@part=2 +POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=2).b SIMPLE [(complex_struct2_c_txt)complex_struct2_c_txt.FieldSchema(name:b, type:string, comment:null), ] +POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=2).insert_num SIMPLE [(complex_struct2_c_txt)complex_struct2_c_txt.FieldSchema(name:insert_num, type:int, comment:null), ] +POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=2).s2 SIMPLE [(complex_struct2_c_txt)complex_struct2_c_txt.FieldSchema(name:s2, type:struct, comment:null), ] +complex_struct2_c_txt.insert_num complex_struct2_c_txt.b complex_struct2_c_txt.s2 +PREHOOK: query: CREATE TABLE complex_struct2_d_txt(insert_num int, b STRING, s2 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@complex_struct2_d_txt +POSTHOOK: query: CREATE TABLE complex_struct2_d_txt(insert_num int, b STRING, s2 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@complex_struct2_d_txt +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct2_d.txt' overwrite into table complex_struct2_d_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@complex_struct2_d_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct2_d.txt' overwrite into table complex_struct2_d_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@complex_struct2_d_txt +PREHOOK: query: insert into table part_add_various_various_struct2 partition(part=1) select * from complex_struct2_d_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@complex_struct2_d_txt +PREHOOK: Output: default@part_add_various_various_struct2@part=1 +POSTHOOK: query: insert into table part_add_various_various_struct2 partition(part=1) select * from complex_struct2_d_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@complex_struct2_d_txt +POSTHOOK: Output: default@part_add_various_various_struct2@part=1 +POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=1).b SIMPLE [(complex_struct2_d_txt)complex_struct2_d_txt.FieldSchema(name:b, type:string, comment:null), ] +POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=1).insert_num SIMPLE [(complex_struct2_d_txt)complex_struct2_d_txt.FieldSchema(name:insert_num, type:int, comment:null), ] 
+POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=1).s2 SIMPLE [(complex_struct2_d_txt)complex_struct2_d_txt.FieldSchema(name:s2, type:struct, comment:null), ] +complex_struct2_d_txt.insert_num complex_struct2_d_txt.b complex_struct2_d_txt.s2 +PREHOOK: query: explain vectorization detail +select insert_num,part,b,s2 from part_add_various_various_struct2 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,b,s2 from part_add_various_various_struct2 +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_add_various_various_struct2 + Statistics: Num rows: 8 Data size: 800 Basic stats: COMPLETE Column stats: PARTIAL + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:b:string, 2:s2:struct, 3:part:int, 4:ROW__ID:struct] + Select Operator + expressions: insert_num (type: int), part (type: int), b (type: string), s2 (type: struct) + outputColumnNames: _col0, _col1, _col2, _col3 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 3, 1, 2] + Statistics: Num rows: 8 Data size: 800 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 8 Data size: 800 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [0, 1, 2] + dataColumns: insert_num:int, b:string, s2:struct + partitionColumnCount: 1 + partitionColumns: part:int + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,b,s2 from part_add_various_various_struct2 +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_various_various_struct2 +PREHOOK: Input: default@part_add_various_various_struct2@part=1 +PREHOOK: Input: default@part_add_various_various_struct2@part=2 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,b,s2 from part_add_various_various_struct2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_various_various_struct2 +POSTHOOK: Input: default@part_add_various_various_struct2@part=1 +POSTHOOK: Input: default@part_add_various_various_struct2@part=2 +#### A masked pattern was here #### +insert_num part b s2 +1 1 original NULL +2 1 original NULL +3 1 new 
{"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"} +4 1 new {"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":" baffling","c10":" baffling ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":"6e 29 da af"} +5 2 new {"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"} +6 2 new {"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"} +7 2 new {"c1":"true","c2":"400","c3":"44388","c4":"-100","c5":"953967041.","c6":"62.079153","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":"timestamp","c12":"date","c13":"binary"} +8 1 new {"c1":"false","c2":"-67","c3":"833","c4":"63993","c5":"1255178165.77663","c6":"905070.974","c7":"-4314.7918","c8":"-1240033819","c9":"trial","c10":"trial","c11":"2016-03-0703:02:22.0","c12":"2016-03-07","c13":"binary"} +PREHOOK: query: drop table part_add_various_various_struct2 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_add_various_various_struct2 +PREHOOK: Output: default@part_add_various_various_struct2 +POSTHOOK: query: drop table part_add_various_various_struct2 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_add_various_various_struct2 +POSTHOOK: Output: default@part_add_various_various_struct2 +PREHOOK: query: CREATE TABLE part_add_to_various_various_struct4(insert_num int, b STRING, s3 STRUCT) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_add_to_various_various_struct4 +POSTHOOK: query: CREATE TABLE part_add_to_various_various_struct4(insert_num int, b STRING, s3 STRUCT) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_add_to_various_various_struct4 +PREHOOK: query: CREATE TABLE complex_struct4_a_txt(insert_num int, b STRING, s3 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@complex_struct4_a_txt +POSTHOOK: query: CREATE TABLE complex_struct4_a_txt(insert_num int, b STRING, s3 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@complex_struct4_a_txt +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct4_a.txt' overwrite into table complex_struct4_a_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@complex_struct4_a_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct4_a.txt' overwrite into table complex_struct4_a_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@complex_struct4_a_txt +PREHOOK: query: insert into table part_add_to_various_various_struct4 partition(part=1) select * from 
complex_struct4_a_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@complex_struct4_a_txt +PREHOOK: Output: default@part_add_to_various_various_struct4@part=1 +POSTHOOK: query: insert into table part_add_to_various_various_struct4 partition(part=1) select * from complex_struct4_a_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@complex_struct4_a_txt +POSTHOOK: Output: default@part_add_to_various_various_struct4@part=1 +POSTHOOK: Lineage: part_add_to_various_various_struct4 PARTITION(part=1).b SIMPLE [(complex_struct4_a_txt)complex_struct4_a_txt.FieldSchema(name:b, type:string, comment:null), ] +POSTHOOK: Lineage: part_add_to_various_various_struct4 PARTITION(part=1).insert_num SIMPLE [(complex_struct4_a_txt)complex_struct4_a_txt.FieldSchema(name:insert_num, type:int, comment:null), ] +POSTHOOK: Lineage: part_add_to_various_various_struct4 PARTITION(part=1).s3 SIMPLE [(complex_struct4_a_txt)complex_struct4_a_txt.FieldSchema(name:s3, type:struct, comment:null), ] +complex_struct4_a_txt.insert_num complex_struct4_a_txt.b complex_struct4_a_txt.s3 +PREHOOK: query: select insert_num,part,b,s3 from part_add_to_various_various_struct4 +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_to_various_various_struct4 +PREHOOK: Input: default@part_add_to_various_various_struct4@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,b,s3 from part_add_to_various_various_struct4 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_to_various_various_struct4 +POSTHOOK: Input: default@part_add_to_various_various_struct4@part=1 +#### A masked pattern was here #### +insert_num part b s3 +1 1 original {"c1":true,"c2":null,"c3":null,"c4":3244222,"c5":-99999999999} +2 1 original {"c1":null,"c2":100,"c3":null,"c4":14,"c5":-23866739993} +PREHOOK: query: alter table part_add_to_various_various_struct4 replace columns (insert_num int, b STRING, s3 STRUCT) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_add_to_various_various_struct4 +PREHOOK: Output: default@part_add_to_various_various_struct4 +POSTHOOK: query: alter table part_add_to_various_various_struct4 replace columns (insert_num int, b STRING, s3 STRUCT) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_add_to_various_various_struct4 +POSTHOOK: Output: default@part_add_to_various_various_struct4 +PREHOOK: query: CREATE TABLE complex_struct4_b_txt(insert_num int, b STRING, s3 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@complex_struct4_b_txt +POSTHOOK: query: CREATE TABLE complex_struct4_b_txt(insert_num int, b STRING, s3 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@complex_struct4_b_txt +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct4_b.txt' overwrite into table complex_struct4_b_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@complex_struct4_b_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct4_b.txt' overwrite into table complex_struct4_b_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@complex_struct4_b_txt +PREHOOK: query: insert into table 
part_add_to_various_various_struct4 partition(part=2) select * from complex_struct4_b_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@complex_struct4_b_txt +PREHOOK: Output: default@part_add_to_various_various_struct4@part=2 +POSTHOOK: query: insert into table part_add_to_various_various_struct4 partition(part=2) select * from complex_struct4_b_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@complex_struct4_b_txt +POSTHOOK: Output: default@part_add_to_various_various_struct4@part=2 +POSTHOOK: Lineage: part_add_to_various_various_struct4 PARTITION(part=2).b SIMPLE [(complex_struct4_b_txt)complex_struct4_b_txt.FieldSchema(name:b, type:string, comment:null), ] +POSTHOOK: Lineage: part_add_to_various_various_struct4 PARTITION(part=2).insert_num SIMPLE [(complex_struct4_b_txt)complex_struct4_b_txt.FieldSchema(name:insert_num, type:int, comment:null), ] +POSTHOOK: Lineage: part_add_to_various_various_struct4 PARTITION(part=2).s3 SIMPLE [(complex_struct4_b_txt)complex_struct4_b_txt.FieldSchema(name:s3, type:struct, comment:null), ] +complex_struct4_b_txt.insert_num complex_struct4_b_txt.b complex_struct4_b_txt.s3 +PREHOOK: query: CREATE TABLE complex_struct4_c_txt(insert_num int, b STRING, s3 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@complex_struct4_c_txt +POSTHOOK: query: CREATE TABLE complex_struct4_c_txt(insert_num int, b STRING, s3 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@complex_struct4_c_txt +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct4_c.txt' overwrite into table complex_struct4_c_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@complex_struct4_c_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct4_c.txt' overwrite into table complex_struct4_c_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@complex_struct4_c_txt +PREHOOK: query: insert into table part_add_to_various_various_struct4 partition(part=1) select * from complex_struct4_c_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@complex_struct4_c_txt +PREHOOK: Output: default@part_add_to_various_various_struct4@part=1 +POSTHOOK: query: insert into table part_add_to_various_various_struct4 partition(part=1) select * from complex_struct4_c_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@complex_struct4_c_txt +POSTHOOK: Output: default@part_add_to_various_various_struct4@part=1 +POSTHOOK: Lineage: part_add_to_various_various_struct4 PARTITION(part=1).b SIMPLE [(complex_struct4_c_txt)complex_struct4_c_txt.FieldSchema(name:b, type:string, comment:null), ] +POSTHOOK: Lineage: part_add_to_various_various_struct4 PARTITION(part=1).insert_num SIMPLE [(complex_struct4_c_txt)complex_struct4_c_txt.FieldSchema(name:insert_num, type:int, comment:null), ] +POSTHOOK: Lineage: part_add_to_various_various_struct4 PARTITION(part=1).s3 SIMPLE [(complex_struct4_c_txt)complex_struct4_c_txt.FieldSchema(name:s3, type:struct, comment:null), ] +complex_struct4_c_txt.insert_num complex_struct4_c_txt.b complex_struct4_c_txt.s3 +PREHOOK: query: explain vectorization detail +select insert_num,part,b,s3 from 
part_add_to_various_various_struct4 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,b,s3 from part_add_to_various_various_struct4 +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_add_to_various_various_struct4 + Statistics: Num rows: 4 Data size: 4892 Basic stats: COMPLETE Column stats: PARTIAL + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:b:string, 2:s3:struct, 3:part:int, 4:ROW__ID:struct] + Select Operator + expressions: insert_num (type: int), part (type: int), b (type: string), s3 (type: struct) + outputColumnNames: _col0, _col1, _col2, _col3 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 3, 1, 2] + Statistics: Num rows: 4 Data size: 3736 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 4 Data size: 3736 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [0, 1, 2] + dataColumns: insert_num:int, b:string, s3:struct + partitionColumnCount: 1 + partitionColumns: part:int + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,b,s3 from part_add_to_various_various_struct4 +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_to_various_various_struct4 +PREHOOK: Input: default@part_add_to_various_various_struct4@part=1 +PREHOOK: Input: default@part_add_to_various_various_struct4@part=2 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,b,s3 from part_add_to_various_various_struct4 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_to_various_various_struct4 +POSTHOOK: Input: default@part_add_to_various_various_struct4@part=1 +POSTHOOK: Input: default@part_add_to_various_various_struct4@part=2 +#### A masked pattern was here #### +insert_num part b s3 +1 1 original {"c1":true,"c2":null,"c3":null,"c4":3244222,"c5":-99999999999,"c6":null,"c7":null,"c8":null,"c9":null,"c10":null,"c11":null,"c12":null,"c13":null} +2 1 original {"c1":null,"c2":100,"c3":null,"c4":14,"c5":-23866739993,"c6":null,"c7":null,"c8":null,"c9":null,"c10":null,"c11":null,"c12":null,"c13":null} +3 2 new {"c1":true,"c2":null,"c3":null,"c4":-100,"c5":953967041,"c6":62.07915,"c7":718.78,"c8":1,"c9":"verdict 
","c10":"verdict","c11":null,"c12":null,"c13":n)گ} +4 1 new {"c1":false,"c2":-67,"c3":833,"c4":63993,"c5":1255178165,"c6":905071.0,"c7":-4314.7918,"c8":-1240033819,"c9":"trial ","c10":"trial","c11":null,"c12":"2016-03-07","c13":n)گ} +PREHOOK: query: drop table part_add_to_various_various_struct4 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_add_to_various_various_struct4 +PREHOOK: Output: default@part_add_to_various_various_struct4 +POSTHOOK: query: drop table part_add_to_various_various_struct4 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_add_to_various_various_struct4 +POSTHOOK: Output: default@part_add_to_various_various_struct4 diff --git ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive.q.out ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive.q.out index ca9066e..9552f06 100644 --- ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive.q.out +++ ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive.q.out @@ -294,9 +294,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 55 @@ -545,9 +546,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 35 @@ -718,9 +720,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 14 @@ -875,9 +878,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 6 @@ -1113,9 +1117,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 8 diff --git ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive_llap_io.q.out ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive_llap_io.q.out new file mode 100644 index 0000000..17fa938 --- /dev/null +++ ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive_llap_io.q.out @@ -0,0 +1,1177 @@ +PREHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@schema_evolution_data +POSTHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@schema_evolution_data +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@schema_evolution_data +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@schema_evolution_data +PREHOOK: query: CREATE TABLE schema_evolution_data_2(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@schema_evolution_data_2 +POSTHOOK: query: CREATE TABLE schema_evolution_data_2(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: 
database:default +POSTHOOK: Output: default@schema_evolution_data_2 +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data_2.txt' overwrite into table schema_evolution_data_2 +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@schema_evolution_data_2 +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data_2.txt' overwrite into table schema_evolution_data_2 +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@schema_evolution_data_2 +PREHOOK: query: CREATE TABLE part_change_various_various_boolean_to_bigint(insert_num int, + c1 TINYINT, c2 SMALLINT, c3 INT, c4 BIGINT, c5 FLOAT, c6 DOUBLE, c7 DECIMAL(38,18), c8 STRING, c9 TIMESTAMP, + c10 BOOLEAN, c11 SMALLINT, c12 INT, c13 BIGINT, c14 FLOAT, c15 DOUBLE, c16 DECIMAL(38,18), c17 STRING, c18 CHAR(25), c19 VARCHAR(25), c20 TIMESTAMP, + c21 BOOLEAN, c22 TINYINT, c23 INT, c24 BIGINT, c25 FLOAT, c26 DOUBLE, c27 DECIMAL(38,18), c28 STRING, c29 CHAR(25), c30 VARCHAR(25), c31 TIMESTAMP, + c32 BOOLEAN, c33 TINYINT, c34 SMALLINT, c35 BIGINT, c36 FLOAT, c37 DOUBLE, c38 DECIMAL(38,18), c39 STRING, c40 CHAR(25), c41 VARCHAR(25), c42 TIMESTAMP, + c43 BOOLEAN, c44 TINYINT, c45 SMALLINT, c46 INT, c47 FLOAT, c48 DOUBLE, c49 DECIMAL(38,18), c50 STRING, c51 CHAR(25), c52 VARCHAR(25), c53 TIMESTAMP, + b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_various_various_boolean_to_bigint +POSTHOOK: query: CREATE TABLE part_change_various_various_boolean_to_bigint(insert_num int, + c1 TINYINT, c2 SMALLINT, c3 INT, c4 BIGINT, c5 FLOAT, c6 DOUBLE, c7 DECIMAL(38,18), c8 STRING, c9 TIMESTAMP, + c10 BOOLEAN, c11 SMALLINT, c12 INT, c13 BIGINT, c14 FLOAT, c15 DOUBLE, c16 DECIMAL(38,18), c17 STRING, c18 CHAR(25), c19 VARCHAR(25), c20 TIMESTAMP, + c21 BOOLEAN, c22 TINYINT, c23 INT, c24 BIGINT, c25 FLOAT, c26 DOUBLE, c27 DECIMAL(38,18), c28 STRING, c29 CHAR(25), c30 VARCHAR(25), c31 TIMESTAMP, + c32 BOOLEAN, c33 TINYINT, c34 SMALLINT, c35 BIGINT, c36 FLOAT, c37 DOUBLE, c38 DECIMAL(38,18), c39 STRING, c40 CHAR(25), c41 VARCHAR(25), c42 TIMESTAMP, + c43 BOOLEAN, c44 TINYINT, c45 SMALLINT, c46 INT, c47 FLOAT, c48 DOUBLE, c49 DECIMAL(38,18), c50 STRING, c51 CHAR(25), c52 VARCHAR(25), c53 TIMESTAMP, + b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_various_various_boolean_to_bigint +PREHOOK: query: insert into table part_change_various_various_boolean_to_bigint partition(part=1) SELECT insert_num, + tinyint1, smallint1, int1, bigint1, float1, double1, decimal1, boolean_str, timestamp1, + boolean1, smallint1, int1, bigint1, float1, double1, decimal1, tinyint_str, tinyint_str, tinyint_str, timestamp1, + boolean1, tinyint1, int1, bigint1, float1, double1, decimal1, smallint_str, smallint_str, smallint_str, timestamp1, + boolean1, tinyint1, smallint1, bigint1, float1, double1, decimal1, int_str, int_str, int_str, timestamp1, + boolean1, tinyint1, smallint1, int1, float1, double1, decimal1, bigint_str, bigint_str, bigint_str, timestamp1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_various_various_boolean_to_bigint@part=1 +POSTHOOK: query: insert into table part_change_various_various_boolean_to_bigint partition(part=1) SELECT insert_num, + tinyint1, smallint1, int1, 
bigint1, float1, double1, decimal1, boolean_str, timestamp1, + boolean1, smallint1, int1, bigint1, float1, double1, decimal1, tinyint_str, tinyint_str, tinyint_str, timestamp1, + boolean1, tinyint1, int1, bigint1, float1, double1, decimal1, smallint_str, smallint_str, smallint_str, timestamp1, + boolean1, tinyint1, smallint1, bigint1, float1, double1, decimal1, int_str, int_str, int_str, timestamp1, + boolean1, tinyint1, smallint1, int1, float1, double1, decimal1, bigint_str, bigint_str, bigint_str, timestamp1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_various_various_boolean_to_bigint@part=1 +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c16 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c17 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c18 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c19 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c20 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c21 SIMPLE 
[(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c22 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c23 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c24 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c25 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c26 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c27 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c28 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c29 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c30 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c31 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c32 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c33 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c34 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c35 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c36 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c37 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint 
PARTITION(part=1).c38 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c39 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c40 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c41 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c42 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c43 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c44 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c45 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c46 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c47 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c48 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c49 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c50 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c51 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c52 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c53 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: 
part_change_various_various_boolean_to_bigint PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 _col17 _col18 _col19 _col20 _col21 _col22 _col23 _col24 _col25 _col26 _col27 _col28 _col29 _col30 _col31 _col32 _col33 _col34 _col35 _col36 _col37 _col38 _col39 _col40 _col41 _col42 _col43 _col44 _col45 _col46 _col47 _col48 _col49 _col50 _col51 _col52 _col53 _col54 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_various_various_boolean_to_bigint +PREHOOK: Input: default@part_change_various_various_boolean_to_bigint@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint +POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 c21 c22 c23 c24 c25 c26 c27 c28 c29 c30 c31 c32 c33 c34 c35 c36 c37 c38 c39 c40 c41 c42 c43 c44 c45 c46 c47 c48 c49 c50 c51 c52 c53 b +101 1 -128 NULL -2147483648 NULL Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 true 6229-06-28 02:54:28.970117179 true NULL -2147483648 NULL Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 -128 -128 -128 6229-06-28 02:54:28.970117179 true -128 -2147483648 NULL Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 -32768 -32768 -32768 6229-06-28 02:54:28.970117179 true -128 NULL NULL Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 -2147483648 -2147483648 -2147483648 6229-06-28 02:54:28.970117179 true -128 NULL -2147483648 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 -9223372036854775808 -9223372036854775808 -9223372036854775808 6229-06-28 02:54:28.970117179 original +102 1 127 32767 2147483647 9223372036854775807 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 false 5966-07-09 03:30:50.597 false
32767 2147483647 9223372036854775807 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 127 127 127 5966-07-09 03:30:50.597 false 127 2147483647 9223372036854775807 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 32767 32767 32767 5966-07-09 03:30:50.597 false 127 32767 9223372036854775807 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 2147483647 2147483647 2147483647 5966-07-09 03:30:50.597 false 127 32767 2147483647 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 9223372036854775807 9223372036854775807 9223372036854775807 5966-07-09 03:30:50.597 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 23 834 203332 888888857923222 -100.35978 30.774 66475.561431000000000000 true 1978-08-02 06:34:14 true 834 203332 888888857923222 -100.35978 30.774 66475.561431000000000000 23 23 23 1978-08-02 06:34:14 true 23 203332 888888857923222 -100.35978 30.774 66475.561431000000000000 834 834 834 1978-08-02 06:34:14 true 23 834 888888857923222 -100.35978 30.774 66475.561431000000000000 203332 203332 203332 1978-08-02 06:34:14 true 23 834 203332 -100.35978 30.774 66475.561431000000000000 888888857923222 888888857923222 888888857923222 1978-08-02 06:34:14 original +105 1 -99 -28300 -999992 -222282153733 NULL 46114.28 9250340.750000000000000000 false 1991-01-06 16:20:39.72036854 false -28300 -999992 -222282153733 NULL 46114.28 9250340.750000000000000000 -99 -99 -99 1991-01-06 16:20:39.72036854 false -99 -999992 -222282153733 NULL 46114.28 9250340.750000000000000000 -28300 -28300 -28300 1991-01-06 16:20:39.72036854 false -99 -28300 -222282153733 NULL 46114.28 9250340.750000000000000000 -999992 -999992 -999992 1991-01-06 16:20:39.72036854 false -99 -28300 -999992 NULL 46114.28 9250340.750000000000000000 -222282153733 -222282153733 -222282153733 1991-01-06 16:20:39.72036854 original +PREHOOK: query: alter table part_change_various_various_boolean_to_bigint replace columns (insert_num int, + c1 BOOLEAN, c2 BOOLEAN, c3 BOOLEAN, c4 BOOLEAN, c5 BOOLEAN, c6 BOOLEAN, c7 BOOLEAN, c8 BOOLEAN, c9 BOOLEAN, + c10 TINYINT, c11 TINYINT, c12 TINYINT, c13 TINYINT, c14 TINYINT, c15 TINYINT, c16 TINYINT, c17 TINYINT, c18 TINYINT, c19 TINYINT, c20 TINYINT, + c21 SMALLINT, c22 SMALLINT, c23 SMALLINT, c24 SMALLINT, c25 SMALLINT, c26 SMALLINT, c27 SMALLINT, c28 SMALLINT, c29 SMALLINT, c30 SMALLINT, c31 SMALLINT, + c32 INT, c33 INT, c34 INT, c35 INT, c36 INT, c37 INT, c38 INT, c39 INT, c40 INT, c41 INT, c42 INT, + c43 BIGINT, c44 BIGINT, c45 BIGINT, c46 BIGINT, c47 BIGINT, c48 BIGINT, c49 BIGINT, c50 BIGINT, c51 BIGINT, c52 BIGINT, c53 BIGINT, + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_various_various_boolean_to_bigint +PREHOOK: Output: default@part_change_various_various_boolean_to_bigint +POSTHOOK: query: alter table part_change_various_various_boolean_to_bigint replace columns (insert_num int, + c1 BOOLEAN, c2 BOOLEAN, c3 BOOLEAN, c4 BOOLEAN, c5 BOOLEAN, c6 BOOLEAN, c7 BOOLEAN, c8 BOOLEAN, c9 BOOLEAN, + c10 TINYINT, c11 TINYINT, c12 TINYINT, c13 TINYINT, c14 TINYINT, c15 TINYINT, c16 TINYINT, c17 TINYINT, c18 TINYINT, c19 TINYINT, c20 TINYINT, + c21 SMALLINT, c22 SMALLINT, c23 SMALLINT, c24 SMALLINT, c25 SMALLINT, c26 SMALLINT, c27 SMALLINT, c28 SMALLINT, c29 SMALLINT, c30 SMALLINT, c31 SMALLINT, + c32 
INT, c33 INT, c34 INT, c35 INT, c36 INT, c37 INT, c38 INT, c39 INT, c40 INT, c41 INT, c42 INT, + c43 BIGINT, c44 BIGINT, c45 BIGINT, c46 BIGINT, c47 BIGINT, c48 BIGINT, c49 BIGINT, c50 BIGINT, c51 BIGINT, c52 BIGINT, c53 BIGINT, + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint +POSTHOOK: Output: default@part_change_various_various_boolean_to_bigint +PREHOOK: query: insert into table part_change_various_various_boolean_to_bigint partition(part=1) SELECT insert_num, + boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, int1, int1, int1, int1, int1, int1, int1, + bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, + 'new' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_various_various_boolean_to_bigint@part=1 +POSTHOOK: query: insert into table part_change_various_various_boolean_to_bigint partition(part=1) SELECT insert_num, + boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, int1, int1, int1, int1, int1, int1, int1, + bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, + 'new' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_various_various_boolean_to_bigint@part=1 +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: 
part_change_various_various_boolean_to_bigint PARTITION(part=1).c16 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c17 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c18 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c19 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c20 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c21 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c22 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c23 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c24 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c25 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c26 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c27 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c28 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c29 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c30 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c31 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, 
type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c32 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c33 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c34 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c35 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c36 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c37 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c38 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c39 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c40 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c41 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c42 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c43 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c44 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c45 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c46 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c47 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c48 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: 
part_change_various_various_boolean_to_bigint PARTITION(part=1).c49 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c50 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c51 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c52 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c53 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num boolean1 boolean1 boolean1 boolean1 boolean1 boolean1 boolean1 boolean1 boolean1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 smallint1 smallint1 smallint1 smallint1 smallint1 smallint1 smallint1 smallint1 smallint1 smallint1 smallint1 int1 int1 int1 int1 int1 int1 int1 int1 int1 int1 int1 bigint1 bigint1 bigint1 bigint1 bigint1 bigint1 bigint1 bigint1 bigint1 bigint1 bigint1 _c54 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A 
masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_various_various_boolean_to_bigint + Statistics: Num rows: 10 Data size: 1228 Basic stats: COMPLETE Column stats: PARTIAL + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:c1:boolean, 2:c2:boolean, 3:c3:boolean, 4:c4:boolean, 5:c5:boolean, 6:c6:boolean, 7:c7:boolean, 8:c8:boolean, 9:c9:boolean, 10:c10:tinyint, 11:c11:tinyint, 12:c12:tinyint, 13:c13:tinyint, 14:c14:tinyint, 15:c15:tinyint, 16:c16:tinyint, 17:c17:tinyint, 18:c18:tinyint, 19:c19:tinyint, 20:c20:tinyint, 21:c21:smallint, 22:c22:smallint, 23:c23:smallint, 24:c24:smallint, 25:c25:smallint, 26:c26:smallint, 27:c27:smallint, 28:c28:smallint, 29:c29:smallint, 30:c30:smallint, 31:c31:smallint, 32:c32:int, 33:c33:int, 34:c34:int, 35:c35:int, 36:c36:int, 37:c37:int, 38:c38:int, 39:c39:int, 40:c40:int, 41:c41:int, 42:c42:int, 43:c43:bigint, 44:c44:bigint, 45:c45:bigint, 46:c46:bigint, 47:c47:bigint, 48:c48:bigint, 49:c49:bigint, 50:c50:bigint, 51:c51:bigint, 52:c52:bigint, 53:c53:bigint, 54:b:string, 55:part:int, 56:ROW__ID:struct] + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: boolean), c2 (type: boolean), c3 (type: boolean), c4 (type: boolean), c5 (type: boolean), c6 (type: boolean), c7 (type: boolean), c8 (type: boolean), c9 (type: boolean), c10 (type: tinyint), c11 (type: tinyint), c12 (type: tinyint), c13 (type: tinyint), c14 (type: tinyint), c15 (type: tinyint), c16 (type: tinyint), c17 (type: tinyint), c18 (type: tinyint), c19 (type: tinyint), c20 (type: tinyint), c21 (type: smallint), c22 (type: smallint), c23 (type: smallint), c24 (type: smallint), c25 (type: smallint), c26 (type: smallint), c27 (type: smallint), c28 (type: smallint), c29 (type: smallint), c30 (type: smallint), c31 (type: smallint), c32 (type: int), c33 (type: int), c34 (type: int), c35 (type: int), c36 (type: int), c37 (type: int), c38 (type: int), c39 (type: int), c40 (type: int), c41 (type: int), c42 (type: int), c43 (type: bigint), c44 (type: bigint), c45 (type: bigint), c46 (type: bigint), c47 (type: bigint), c48 (type: bigint), c49 (type: bigint), c50 (type: bigint), c51 (type: bigint), c52 (type: bigint), c53 (type: bigint), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32, _col33, _col34, _col35, _col36, _col37, _col38, _col39, _col40, _col41, _col42, _col43, _col44, _col45, _col46, _col47, _col48, _col49, _col50, _col51, _col52, _col53, _col54, _col55 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 55, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54] + Statistics: Num rows: 10 Data size: 1228 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 10 Data size: 1228 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 55 + includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54] + dataColumns: insert_num:int, c1:boolean, c2:boolean, c3:boolean, c4:boolean, c5:boolean, c6:boolean, c7:boolean, c8:boolean, c9:boolean, c10:tinyint, c11:tinyint, c12:tinyint, c13:tinyint, c14:tinyint, c15:tinyint, c16:tinyint, c17:tinyint, c18:tinyint, c19:tinyint, c20:tinyint, c21:smallint, c22:smallint, c23:smallint, c24:smallint, c25:smallint, c26:smallint, c27:smallint, c28:smallint, c29:smallint, c30:smallint, c31:smallint, c32:int, c33:int, c34:int, c35:int, c36:int, c37:int, c38:int, c39:int, c40:int, c41:int, c42:int, c43:bigint, c44:bigint, c45:bigint, c46:bigint, c47:bigint, c48:bigint, c49:bigint, c50:bigint, c51:bigint, c52:bigint, c53:bigint, b:string + partitionColumnCount: 1 + partitionColumns: part:int + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_various_various_boolean_to_bigint +PREHOOK: Input: default@part_change_various_various_boolean_to_bigint@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint +POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 c21 c22 c23 c24 c25 c26 c27 c28 c29 c30 c31 c32 c33 c34 c35 c36 c37 c38 c39 c40 c41 c42 c43 c44 c45 c46 c47 c48 c49 c50 c51 c52 c53 b +101 1 true NULL true NULL NULL NULL true NULL true 1 NULL NULL NULL NULL NULL NULL -128 -128 -128 NULL 1 -128 NULL NULL NULL NULL NULL NULL NULL NULL NULL 1 -128 NULL NULL NULL NULL NULL -2147483648 -2147483648 -2147483648 NULL 1 -128 NULL -2147483648 NULL NULL NULL NULL NULL NULL 134416490068 original +101 1 true true true true true true true true true -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL -2147483648 -2147483648 -2147483648 -2147483648 -2147483648 -2147483648 -2147483648 -2147483648 -2147483648 -2147483648 -2147483648 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL new +102 1 false false false false 
false false false false false 127 127 127 127 127 127 127 127 127 127 127 32767 32767 32767 32767 32767 32767 32767 32767 32767 32767 32767 2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 9223372036854775807 9223372036854775807 9223372036854775807 9223372036854775807 9223372036854775807 9223372036854775807 9223372036854775807 9223372036854775807 9223372036854775807 9223372036854775807 9223372036854775807 new +102 1 true true true true NULL NULL true NULL true 0 NULL NULL NULL NULL NULL NULL 127 127 127 NULL 0 127 NULL NULL NULL NULL NULL 32767 32767 32767 NULL 0 127 32767 NULL NULL NULL NULL 2147483647 2147483647 2147483647 NULL 0 127 32767 2147483647 NULL NULL NULL 9223372036854775807 9223372036854775807 9223372036854775807 126117945050 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL new +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 true true true true true true true NULL true 1 NULL NULL NULL -100 30 NULL 23 23 23 NULL 1 23 NULL NULL -100 30 NULL 834 834 834 NULL 1 23 834 NULL -100 30 66475 203332 203332 203332 270912854 1 23 834 203332 -100 30 66475 888888857923222 888888857923222 888888857923222 270912854 original +104 1 true true true true true true true true true 23 23 23 23 23 23 23 23 23 23 23 834 834 834 834 834 834 834 834 834 834 834 203332 203332 203332 203332 203332 203332 203332 203332 203332 203332 203332 888888857923222 888888857923222 888888857923222 888888857923222 888888857923222 888888857923222 888888857923222 888888857923222 888888857923222 888888857923222 888888857923222 new +105 1 false false false false false false false false false -99 -99 -99 -99 -99 -99 -99 -99 -99 -99 -99 -28300 -28300 -28300 -28300 -28300 -28300 -28300 -28300 -28300 -28300 -28300 -999992 -999992 -999992 -999992 -999992 -999992 -999992 -999992 -999992 -999992 -999992 -222282153733 -222282153733 -222282153733 -222282153733 -222282153733 -222282153733 -222282153733 -222282153733 -222282153733 -222282153733 -222282153733 new +105 1 true true true true NULL true true NULL true 0 NULL NULL NULL NULL NULL NULL -99 -99 -99 NULL 0 -99 NULL NULL NULL NULL NULL -28300 -28300 -28300 NULL 0 -99 -28300 NULL NULL 46114 9250340 -999992 -999992 -999992 663207639 0 -99 -28300 -999992 NULL 46114 9250340 -222282153733 -222282153733 -222282153733 663207639 original +PREHOOK: query: drop table part_change_various_various_boolean_to_bigint +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_various_various_boolean_to_bigint +PREHOOK: Output: default@part_change_various_various_boolean_to_bigint +POSTHOOK: query: drop table part_change_various_various_boolean_to_bigint +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint +POSTHOOK: Output: default@part_change_various_various_boolean_to_bigint +PREHOOK: query: CREATE TABLE part_change_various_various_decimal_to_double(insert_num int, + c1 BOOLEAN, c2 TINYINT, c3 SMALLINT, c4 INT, c5 BIGINT, c6 FLOAT, c7 DOUBLE, c8 STRING, c9 CHAR(25), c10 VARCHAR(25), c11 TIMESTAMP, + c12 
BOOLEAN, c13 TINYINT, c14 SMALLINT, c15 INT, c16 BIGINT, c17 DECIMAL(38,18), c18 DOUBLE, c19 STRING, c20 CHAR(25), c21 VARCHAR(25), c22 TIMESTAMP, + c23 BOOLEAN, c24 TINYINT, c25 SMALLINT, c26 INT, c27 BIGINT, c28 DECIMAL(38,18), c29 FLOAT, c30 STRING, c31 CHAR(25), c32 VARCHAR(25), c33 TIMESTAMP, + b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_various_various_decimal_to_double +POSTHOOK: query: CREATE TABLE part_change_various_various_decimal_to_double(insert_num int, + c1 BOOLEAN, c2 TINYINT, c3 SMALLINT, c4 INT, c5 BIGINT, c6 FLOAT, c7 DOUBLE, c8 STRING, c9 CHAR(25), c10 VARCHAR(25), c11 TIMESTAMP, + c12 BOOLEAN, c13 TINYINT, c14 SMALLINT, c15 INT, c16 BIGINT, c17 DECIMAL(38,18), c18 DOUBLE, c19 STRING, c20 CHAR(25), c21 VARCHAR(25), c22 TIMESTAMP, + c23 BOOLEAN, c24 TINYINT, c25 SMALLINT, c26 INT, c27 BIGINT, c28 DECIMAL(38,18), c29 FLOAT, c30 STRING, c31 CHAR(25), c32 VARCHAR(25), c33 TIMESTAMP, + b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_various_various_decimal_to_double +PREHOOK: query: insert into table part_change_various_various_decimal_to_double partition(part=1) SELECT insert_num, + boolean1, tinyint1, smallint1, int1, bigint1, float1, double1, decimal_str, decimal_str, decimal_str, timestamp1, + boolean1, tinyint1, smallint1, int1, bigint1, decimal1, double1, float_str, float_str, float_str, timestamp1, + boolean1, tinyint1, smallint1, int1, bigint1, decimal1, float1, double_str, double_str, double_str, timestamp1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_various_various_decimal_to_double@part=1 +POSTHOOK: query: insert into table part_change_various_various_decimal_to_double partition(part=1) SELECT insert_num, + boolean1, tinyint1, smallint1, int1, bigint1, float1, double1, decimal_str, decimal_str, decimal_str, timestamp1, + boolean1, tinyint1, smallint1, int1, bigint1, decimal1, double1, float_str, float_str, float_str, timestamp1, + boolean1, tinyint1, smallint1, int1, bigint1, decimal1, float1, double_str, double_str, double_str, timestamp1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_various_various_decimal_to_double@part=1 +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c10 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, 
type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c16 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c17 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c18 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c19 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c20 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c21 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c22 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c23 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c24 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c25 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c26 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c27 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c28 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c29 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c3 SIMPLE 
[(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c30 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c31 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c32 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c33 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c9 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 _col17 _col18 _col19 _col20 _col21 _col22 _col23 _col24 _col25 _col26 _col27 _col28 _col29 _col30 _col31 _col32 _col33 _col34 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_various_various_decimal_to_double +PREHOOK: Input: default@part_change_various_various_decimal_to_double@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_various_various_decimal_to_double +POSTHOOK: Input: default@part_change_various_various_decimal_to_double@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 c21 c22 c23 c24 c25 c26 c27 c28 c29 c30 c31 c32 c33 b +101 1 true -128 NULL 
-2147483648 NULL Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 99999999999999999999.9999 99999999999999999999.9999 6229-06-28 02:54:28.970117179 true -128 NULL -2147483648 NULL 99999999999999999999.999999999999999999 1.7976931348623157E308 340282347000000000000000000000000000000000 3402823470000000000000000 3402823470000000000000000 6229-06-28 02:54:28.970117179 true -128 NULL -2147483648 NULL 99999999999999999999.999999999999999999 Infinity 1.79769313486231570E+308 1.79769313486231570E+308 1.79769313486231570E+308 6229-06-28 02:54:28.970117179 original +102 1 false 127 32767 2147483647 9223372036854775807 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -99999999999999999999.999 -99999999999999999999.999 5966-07-09 03:30:50.597 false 127 32767 2147483647 9223372036854775807 -99999999999999999999.999999999999999999 -1.7976931348623157E308 -340282347000000000000000000000000000000000 -340282347000000000000000 -340282347000000000000000 5966-07-09 03:30:50.597 false 127 32767 2147483647 9223372036854775807 -99999999999999999999.999999999999999999 -Infinity -1.79769313486231570E+308 -1.79769313486231570E+308 -1.79769313486231570E+308 5966-07-09 03:30:50.597 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 true 23 834 203332 888888857923222 -100.35978 30.774 66475.561431 66475.561431 66475.561431 1978-08-02 06:34:14 true 23 834 203332 888888857923222 66475.561431000000000000 30.774 -100.3597812 -100.3597812 -100.3597812 1978-08-02 06:34:14 true 23 834 203332 888888857923222 66475.561431000000000000 -100.35978 30.774 30.774 30.774 1978-08-02 06:34:14 original +105 1 false -99 -28300 -999992 -222282153733 NULL 46114.28 9250340.75 9250340.75 9250340.75 1991-01-06 16:20:39.72036854 false -99 -28300 -999992 -222282153733 9250340.750000000000000000 46114.28 -32768 -32768 -32768 1991-01-06 16:20:39.72036854 false -99 -28300 -999992 -222282153733 9250340.750000000000000000 NULL 46114.28 46114.28 46114.28 1991-01-06 16:20:39.72036854 original +PREHOOK: query: alter table part_change_various_various_decimal_to_double replace columns (insert_num int, + c1 DECIMAL(38,18), c2 DECIMAL(38,18), c3 DECIMAL(38,18), c4 DECIMAL(38,18), c5 DECIMAL(38,18), c6 DECIMAL(38,18), c7 DECIMAL(38,18), c8 DECIMAL(38,18), c9 DECIMAL(38,18), c10 DECIMAL(38,18), c11 DECIMAL(38,18), + c12 FLOAT, c13 FLOAT, c14 FLOAT, c15 FLOAT, c16 FLOAT, c17 FLOAT, c18 FLOAT, c19 FLOAT, c20 FLOAT, c21 FLOAT, c22 FLOAT, + c23 DOUBLE, c24 DOUBLE, c25 DOUBLE, c26 DOUBLE, c27 DOUBLE, c28 DOUBLE, c29 DOUBLE, c30 DOUBLE, c31 DOUBLE, c32 DOUBLE, c33 DOUBLE, + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_various_various_decimal_to_double +PREHOOK: Output: default@part_change_various_various_decimal_to_double +POSTHOOK: query: alter table part_change_various_various_decimal_to_double replace columns (insert_num int, + c1 DECIMAL(38,18), c2 DECIMAL(38,18), c3 DECIMAL(38,18), c4 DECIMAL(38,18), c5 DECIMAL(38,18), c6 DECIMAL(38,18), c7 DECIMAL(38,18), c8 DECIMAL(38,18), c9 DECIMAL(38,18), c10 DECIMAL(38,18), c11 DECIMAL(38,18), + c12 FLOAT, c13 FLOAT, c14 FLOAT, c15 FLOAT, c16 FLOAT, c17 FLOAT, c18 FLOAT, c19 FLOAT, c20 FLOAT, c21 FLOAT, c22 FLOAT, + c23 DOUBLE, c24 DOUBLE, c25 DOUBLE, c26 DOUBLE, c27 DOUBLE, c28 DOUBLE, c29 DOUBLE, c30 DOUBLE, c31 DOUBLE, c32 DOUBLE, c33 DOUBLE, + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input:
default@part_change_various_various_decimal_to_double +POSTHOOK: Output: default@part_change_various_various_decimal_to_double +PREHOOK: query: insert into table part_change_various_various_decimal_to_double partition(part=1) SELECT insert_num, + decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, + float1, float1, float1, float1, float1, float1, float1, float1, float1, float1, float1, + double1, double1, double1, double1, double1, double1, double1, double1, double1, double1, double1, + 'new' FROM schema_evolution_data_2 WHERE insert_num=111 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@part_change_various_various_decimal_to_double@part=1 +POSTHOOK: query: insert into table part_change_various_various_decimal_to_double partition(part=1) SELECT insert_num, + decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, + float1, float1, float1, float1, float1, float1, float1, float1, float1, float1, float1, + double1, double1, double1, double1, double1, double1, double1, double1, double1, double1, double1, + 'new' FROM schema_evolution_data_2 WHERE insert_num=111 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@part_change_various_various_decimal_to_double@part=1 +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c1 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c10 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c11 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c12 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c13 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c14 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c15 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c16 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c17 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c18 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: 
part_change_various_various_decimal_to_double PARTITION(part=1).c19 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c2 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c20 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c21 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c22 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c23 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c24 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c25 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c26 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c27 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c28 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c29 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c3 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c30 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c31 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c32 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c33 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c4 SIMPLE 
[(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c5 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c6 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c7 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c8 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c9 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num decimal1 decimal1 decimal1 decimal1 decimal1 decimal1 decimal1 decimal1 decimal1 decimal1 decimal1 float1 float1 float1 float1 float1 float1 float1 float1 float1 float1 float1 double1 double1 double1 double1 double1 double1 double1 double1 double1 double1 double1 _c34 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_various_various_decimal_to_double + Statistics: Num rows: 6 Data size: 1320 Basic stats: COMPLETE Column stats: PARTIAL + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:c1:decimal(38,18), 2:c2:decimal(38,18), 3:c3:decimal(38,18), 4:c4:decimal(38,18), 5:c5:decimal(38,18), 6:c6:decimal(38,18), 7:c7:decimal(38,18), 8:c8:decimal(38,18), 9:c9:decimal(38,18), 10:c10:decimal(38,18), 11:c11:decimal(38,18), 12:c12:float, 13:c13:float, 14:c14:float, 15:c15:float, 16:c16:float, 17:c17:float, 18:c18:float, 19:c19:float, 20:c20:float, 21:c21:float, 22:c22:float, 23:c23:double, 24:c24:double, 25:c25:double, 26:c26:double, 27:c27:double, 28:c28:double, 29:c29:double, 30:c30:double, 31:c31:double, 32:c32:double, 33:c33:double, 34:b:string, 35:part:int, 36:ROW__ID:struct] + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: decimal(38,18)), c2 (type: decimal(38,18)), c3 (type: decimal(38,18)), c4 (type: decimal(38,18)), c5 (type: 
decimal(38,18)), c6 (type: decimal(38,18)), c7 (type: decimal(38,18)), c8 (type: decimal(38,18)), c9 (type: decimal(38,18)), c10 (type: decimal(38,18)), c11 (type: decimal(38,18)), c12 (type: float), c13 (type: float), c14 (type: float), c15 (type: float), c16 (type: float), c17 (type: float), c18 (type: float), c19 (type: float), c20 (type: float), c21 (type: float), c22 (type: float), c23 (type: double), c24 (type: double), c25 (type: double), c26 (type: double), c27 (type: double), c28 (type: double), c29 (type: double), c30 (type: double), c31 (type: double), c32 (type: double), c33 (type: double), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32, _col33, _col34, _col35 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 35, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34] + Statistics: Num rows: 6 Data size: 1320 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 6 Data size: 1320 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 35 + includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34] + dataColumns: insert_num:int, c1:decimal(38,18), c2:decimal(38,18), c3:decimal(38,18), c4:decimal(38,18), c5:decimal(38,18), c6:decimal(38,18), c7:decimal(38,18), c8:decimal(38,18), c9:decimal(38,18), c10:decimal(38,18), c11:decimal(38,18), c12:float, c13:float, c14:float, c15:float, c16:float, c17:float, c18:float, c19:float, c20:float, c21:float, c22:float, c23:double, c24:double, c25:double, c26:double, c27:double, c28:double, c29:double, c30:double, c31:double, c32:double, c33:double, b:string + partitionColumnCount: 1 + partitionColumns: part:int + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_various_various_decimal_to_double +PREHOOK: Input: default@part_change_various_various_decimal_to_double@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select 
insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_various_various_decimal_to_double +POSTHOOK: Input: default@part_change_various_various_decimal_to_double@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 c21 c22 c23 c24 c25 c26 c27 c28 c29 c30 c31 c32 c33 b +101 1 1.000000000000000000 -128.000000000000000000 NULL -2147483648.000000000000000000 NULL NULL NULL 99999999999999999999.999999999999999999 99999999999999999999.999900000000000000 99999999999999999999.999900000000000000 134416490068.970120000000000000 1.0 -128.0 NULL -2.14748365E9 NULL 1.0E20 Infinity Infinity 3.4028236E24 3.4028236E24 1.3441649E11 1.0 -128.0 NULL -2.147483648E9 NULL 1.0E20 Infinity 1.7976931348623157E308 1.7976931348623157E308 1.7976931348623157E308 1.3441649006897012E11 original +102 1 0.000000000000000000 127.000000000000000000 32767.000000000000000000 2147483647.000000000000000000 9223372036854775807.000000000000000000 NULL NULL -99999999999999999999.999999999999999999 -99999999999999999999.999000000000000000 -99999999999999999999.999000000000000000 126117945050.597000000000000000 0.0 127.0 32767.0 2.14748365E9 9.223372E18 -1.0E20 -Infinity -Infinity -3.4028233E23 -3.4028233E23 1.26117945E11 0.0 127.0 32767.0 2.147483647E9 9.223372036854776E18 -1.0E20 -Infinity -1.7976931348623157E308 -1.7976931348623157E308 -1.7976931348623157E308 1.26117945050597E11 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 1.000000000000000000 23.000000000000000000 834.000000000000000000 203332.000000000000000000 888888857923222.000000000000000000 -100.359780000000000000 30.774000000000000000 66475.561431000000000000 66475.561431000000000000 66475.561431000000000000 270912854.000000000000000000 1.0 23.0 834.0 203332.0 8.8888885E14 66475.56 30.774 -100.35978 -100.35978 -100.35978 2.70912864E8 1.0 23.0 834.0 203332.0 8.88888857923222E14 66475.561431 -100.35978 30.774 30.774 30.774 2.70912854E8 original +105 1 0.000000000000000000 -99.000000000000000000 -28300.000000000000000000 -999992.000000000000000000 -222282153733.000000000000000000 NULL 46114.280000000000000000 9250340.750000000000000000 9250340.750000000000000000 9250340.750000000000000000 663207639.720368500000000000 0.0 -99.0 -28300.0 -999992.0 -2.22282154E11 9250341.0 46114.28 NULL NULL NULL 6.6320762E8 0.0 -99.0 -28300.0 -999992.0 -2.22282153733E11 9250340.75 NULL 46114.28 46114.28 46114.28 6.632076397203685E8 original +111 1 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 new +PREHOOK: query: drop table part_change_various_various_decimal_to_double +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_various_various_decimal_to_double +PREHOOK: Output: default@part_change_various_various_decimal_to_double +POSTHOOK: query: drop table 
part_change_various_various_decimal_to_double +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_various_various_decimal_to_double +POSTHOOK: Output: default@part_change_various_various_decimal_to_double +PREHOOK: query: CREATE TABLE part_change_various_various_timestamp(insert_num int, c1 BOOLEAN, c2 TINYINT, c3 SMALLINT, c4 INT, c5 BIGINT, c6 FLOAT, c7 DOUBLE, c8 DECIMAL(38,18), c9 STRING, c10 CHAR(25), c11 VARCHAR(25), c12 DATE, b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_various_various_timestamp +POSTHOOK: query: CREATE TABLE part_change_various_various_timestamp(insert_num int, c1 BOOLEAN, c2 TINYINT, c3 SMALLINT, c4 INT, c5 BIGINT, c6 FLOAT, c7 DOUBLE, c8 DECIMAL(38,18), c9 STRING, c10 CHAR(25), c11 VARCHAR(25), c12 DATE, b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_various_various_timestamp +PREHOOK: query: insert into table part_change_various_various_timestamp partition(part=1) SELECT insert_num, boolean1, tinyint1, smallint1, int1, bigint1, float1, double1, decimal1, timestamp_str, timestamp_str, timestamp_str, date1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_various_various_timestamp@part=1 +POSTHOOK: query: insert into table part_change_various_various_timestamp partition(part=1) SELECT insert_num, boolean1, tinyint1, smallint1, int1, bigint1, float1, double1, decimal1, timestamp_str, timestamp_str, timestamp_str, date1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_various_various_timestamp@part=1 +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c10 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c11 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c6 SIMPLE 
[(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_various_various_timestamp +PREHOOK: Input: default@part_change_various_various_timestamp@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_various_various_timestamp +POSTHOOK: Input: default@part_change_various_various_timestamp@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 b +101 1 true -128 NULL -2147483648 NULL Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.97011 6229-06-28 02:54:28.97011 1950-12-18 original +102 1 false 127 32767 2147483647 9223372036854775807 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 2049-12-18 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 true 23 834 203332 888888857923222 -100.35978 30.774 66475.561431000000000000 1978-08-02 06:34:14 1978-08-02 06:34:14 1978-08-02 06:34:14 2021-09-24 original +105 1 false -99 -28300 -999992 -222282153733 NULL 46114.28 9250340.750000000000000000 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036 1991-01-06 16:20:39.72036 2024-11-11 original +PREHOOK: query: alter table part_change_various_various_timestamp replace columns (insert_num int, c1 TIMESTAMP, c2 TIMESTAMP, c3 TIMESTAMP, c4 TIMESTAMP, c5 TIMESTAMP, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, c11 TIMESTAMP, c12 TIMESTAMP, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_various_various_timestamp +PREHOOK: Output: default@part_change_various_various_timestamp +POSTHOOK: query: alter table part_change_various_various_timestamp replace columns (insert_num int, c1 TIMESTAMP, c2 TIMESTAMP, c3 TIMESTAMP, c4 TIMESTAMP, c5 TIMESTAMP, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, c11 TIMESTAMP, c12 TIMESTAMP, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_various_various_timestamp +POSTHOOK: Output: default@part_change_various_various_timestamp +PREHOOK: query: insert into table part_change_various_various_timestamp partition(part=1) SELECT insert_num, timestamp1, timestamp1, timestamp1, 
timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'new' FROM schema_evolution_data_2 WHERE insert_num=111 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@part_change_various_various_timestamp@part=1 +POSTHOOK: query: insert into table part_change_various_various_timestamp partition(part=1) SELECT insert_num, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'new' FROM schema_evolution_data_2 WHERE insert_num=111 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@part_change_various_various_timestamp@part=1 +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c1 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c10 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c11 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c12 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c2 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c3 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c4 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c5 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c6 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c7 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c8 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c9 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 
timestamp1 timestamp1 _c13 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_various_various_timestamp + Statistics: Num rows: 6 Data size: 6973 Basic stats: COMPLETE Column stats: PARTIAL + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:c1:timestamp, 2:c2:timestamp, 3:c3:timestamp, 4:c4:timestamp, 5:c5:timestamp, 6:c6:timestamp, 7:c7:timestamp, 8:c8:timestamp, 9:c9:timestamp, 10:c10:timestamp, 11:c11:timestamp, 12:c12:timestamp, 13:b:string, 14:part:int, 15:ROW__ID:struct] + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: timestamp), c2 (type: timestamp), c3 (type: timestamp), c4 (type: timestamp), c5 (type: timestamp), c6 (type: timestamp), c7 (type: timestamp), c8 (type: timestamp), c9 (type: timestamp), c10 (type: timestamp), c11 (type: timestamp), c12 (type: timestamp), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 14, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13] + Statistics: Num rows: 6 Data size: 4032 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 6 Data size: 4032 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 14 + includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13] + dataColumns: insert_num:int, c1:timestamp, c2:timestamp, c3:timestamp, c4:timestamp, c5:timestamp, c6:timestamp, c7:timestamp, c8:timestamp, c9:timestamp, c10:timestamp, c11:timestamp, c12:timestamp, b:string + partitionColumnCount: 1 + partitionColumns: part:int + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_various_various_timestamp +PREHOOK: Input: default@part_change_various_various_timestamp@part=1 
+#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_various_various_timestamp +POSTHOOK: Input: default@part_change_various_various_timestamp@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 b +101 1 1969-12-31 16:00:00.001 1969-12-31 15:59:59.872 NULL 1969-12-06 19:28:36.352 NULL NULL NULL NULL 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.97011 6229-06-28 02:54:28.97011 1950-12-18 00:00:00 original +102 1 1969-12-31 16:00:00 1969-12-31 16:00:00.127 1969-12-31 16:00:32.767 1970-01-25 12:31:23.647 NULL NULL 1969-12-31 16:00:00 NULL 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 2049-12-18 00:00:00 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 1969-12-31 16:00:00.001 1969-12-31 16:00:00.023 1969-12-31 16:00:00.834 1969-12-31 16:03:23.332 NULL 1969-12-31 15:58:19.640220643 1969-12-31 16:00:30.774 1970-01-01 10:27:55.561431 1978-08-02 06:34:14 1978-08-02 06:34:14 1978-08-02 06:34:14 2021-09-24 00:00:00 original +105 1 1969-12-31 16:00:00 1969-12-31 15:59:59.901 1969-12-31 15:59:31.7 1969-12-31 15:43:20.008 1962-12-15 22:57:26.267 NULL 1970-01-01 04:48:34.28 1970-04-17 17:32:20.75 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036 1991-01-06 16:20:39.72036 2024-11-11 00:00:00 original +111 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL new +PREHOOK: query: drop table part_change_various_various_timestamp +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_various_various_timestamp +PREHOOK: Output: default@part_change_various_various_timestamp +POSTHOOK: query: drop table part_change_various_various_timestamp +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_various_various_timestamp +POSTHOOK: Output: default@part_change_various_various_timestamp +PREHOOK: query: CREATE TABLE part_change_various_various_date(insert_num int, c1 STRING, c2 CHAR(25), c3 VARCHAR(25), c4 TIMESTAMP, b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_various_various_date +POSTHOOK: query: CREATE TABLE part_change_various_various_date(insert_num int, c1 STRING, c2 CHAR(25), c3 VARCHAR(25), c4 TIMESTAMP, b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_various_various_date +PREHOOK: query: insert into table part_change_various_various_date partition(part=1) SELECT insert_num, date_str, date_str, date_str, timestamp1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_various_various_date@part=1 +POSTHOOK: query: insert into table part_change_various_various_date partition(part=1) SELECT insert_num, date_str, date_str, date_str, timestamp1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_various_various_date@part=1 +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date_str, type:string, comment:null), ] +POSTHOOK: Lineage: 
part_change_various_various_date PARTITION(part=1).c2 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).c3 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +_col0 _col1 _col2 _col3 _col4 _col5 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_various_various_date +PREHOOK: Input: default@part_change_various_various_date@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_various_various_date +POSTHOOK: Input: default@part_change_various_various_date@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 b +101 1 1950-12-18 1950-12-18 1950-12-18 6229-06-28 02:54:28.970117179 original +102 1 2049-12-18 2049-12-18 2049-12-18 5966-07-09 03:30:50.597 original +103 1 NULL original +104 1 2021-09-24 2021-09-24 2021-09-24 1978-08-02 06:34:14 original +105 1 2024-11-11 2024-11-11 2024-11-11 1991-01-06 16:20:39.72036854 original +PREHOOK: query: alter table part_change_various_various_date replace columns (insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_various_various_date +PREHOOK: Output: default@part_change_various_various_date +POSTHOOK: query: alter table part_change_various_various_date replace columns (insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_various_various_date +POSTHOOK: Output: default@part_change_various_various_date +PREHOOK: query: insert into table part_change_various_various_date partition(part=1) SELECT insert_num, date1, date1, date1, date1, 'new' FROM schema_evolution_data_2 WHERE insert_num=111 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@part_change_various_various_date@part=1 +POSTHOOK: query: insert into table part_change_various_various_date partition(part=1) SELECT insert_num, date1, date1, date1, date1, 'new' FROM schema_evolution_data_2 WHERE insert_num=111 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@part_change_various_various_date@part=1 +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).c1 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).c2 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).c3 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:date1, type:date, 
comment:null), ] +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).c4 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num date1 date1 date1 date1 _c5 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_various_various_date + Statistics: Num rows: 6 Data size: 2496 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:c1:date, 2:c2:date, 3:c3:date, 4:c4:date, 5:b:string, 6:part:int, 7:ROW__ID:struct] + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: date), c2 (type: date), c3 (type: date), c4 (type: date), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 6, 1, 2, 3, 4, 5] + Statistics: Num rows: 6 Data size: 2496 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 6 Data size: 2496 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 6 + includeColumns: [0, 1, 2, 3, 4, 5] + dataColumns: insert_num:int, c1:date, c2:date, c3:date, c4:date, b:string + partitionColumnCount: 1 + partitionColumns: part:int + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_various_various_date +PREHOOK: Input: default@part_change_various_various_date@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_various_various_date +POSTHOOK: Input: default@part_change_various_various_date@part=1 +#### A masked pattern was here 
#### +insert_num part c1 c2 c3 c4 b +101 1 1950-12-18 1950-12-18 1950-12-18 6229-06-28 original +102 1 2049-12-18 2049-12-18 2049-12-18 5966-07-09 original +103 1 NULL NULL NULL NULL original +104 1 2021-09-24 2021-09-24 2021-09-24 1978-08-02 original +105 1 2024-11-11 2024-11-11 2024-11-11 1991-01-06 original +111 1 1964-01-24 1964-01-24 1964-01-24 1964-01-24 new +PREHOOK: query: drop table part_change_various_various_date +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_various_various_date +PREHOOK: Output: default@part_change_various_various_date +POSTHOOK: query: drop table part_change_various_various_date +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_various_various_date +POSTHOOK: Output: default@part_change_various_various_date +PREHOOK: query: CREATE TABLE part_change_same_type_different_params(insert_num int, c1 CHAR(12), c2 CHAR(25), c3 VARCHAR(25), c4 VARCHAR(10), c5 DECIMAL(12,4), c6 DECIMAL(20,10), b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_same_type_different_params +POSTHOOK: query: CREATE TABLE part_change_same_type_different_params(insert_num int, c1 CHAR(12), c2 CHAR(25), c3 VARCHAR(25), c4 VARCHAR(10), c5 DECIMAL(12,4), c6 DECIMAL(20,10), b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_same_type_different_params +PREHOOK: query: CREATE TABLE same_type1_a_txt(insert_num int, c1 CHAR(12), c2 CHAR(25), c3 VARCHAR(25), c4 VARCHAR(10), c5 DECIMAL(12,4), c6 DECIMAL(20,10), b STRING) +row format delimited fields terminated by '|' +stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@same_type1_a_txt +POSTHOOK: query: CREATE TABLE same_type1_a_txt(insert_num int, c1 CHAR(12), c2 CHAR(25), c3 VARCHAR(25), c4 VARCHAR(10), c5 DECIMAL(12,4), c6 DECIMAL(20,10), b STRING) +row format delimited fields terminated by '|' +stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@same_type1_a_txt +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/same_type1_a.txt' overwrite into table same_type1_a_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@same_type1_a_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/same_type1_a.txt' overwrite into table same_type1_a_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@same_type1_a_txt +PREHOOK: query: insert into table part_change_same_type_different_params partition(part=1) select * from same_type1_a_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@same_type1_a_txt +PREHOOK: Output: default@part_change_same_type_different_params@part=1 +POSTHOOK: query: insert into table part_change_same_type_different_params partition(part=1) select * from same_type1_a_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@same_type1_a_txt +POSTHOOK: Output: default@part_change_same_type_different_params@part=1 +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).b SIMPLE [(same_type1_a_txt)same_type1_a_txt.FieldSchema(name:b, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c1 SIMPLE [(same_type1_a_txt)same_type1_a_txt.FieldSchema(name:c1, type:char(12), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params 
PARTITION(part=1).c2 SIMPLE [(same_type1_a_txt)same_type1_a_txt.FieldSchema(name:c2, type:char(25), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c3 SIMPLE [(same_type1_a_txt)same_type1_a_txt.FieldSchema(name:c3, type:varchar(25), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c4 SIMPLE [(same_type1_a_txt)same_type1_a_txt.FieldSchema(name:c4, type:varchar(10), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c5 SIMPLE [(same_type1_a_txt)same_type1_a_txt.FieldSchema(name:c5, type:decimal(12,4), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c6 SIMPLE [(same_type1_a_txt)same_type1_a_txt.FieldSchema(name:c6, type:decimal(20,10), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).insert_num SIMPLE [(same_type1_a_txt)same_type1_a_txt.FieldSchema(name:insert_num, type:int, comment:null), ] +same_type1_a_txt.insert_num same_type1_a_txt.c1 same_type1_a_txt.c2 same_type1_a_txt.c3 same_type1_a_txt.c4 same_type1_a_txt.c5 same_type1_a_txt.c6 same_type1_a_txt.b +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_same_type_different_params +PREHOOK: Input: default@part_change_same_type_different_params@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_same_type_different_params +POSTHOOK: Input: default@part_change_same_type_different_params@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 b +1 1 excess corporation believable gateway thumb repe 76855494.3900 NULL original +2 1 mutation hideout authentic blankness sonic disfigure hover know 42093605.0000 3182652.4066800000 original +3 1 hibernation country detox coyotes ball pulse candle direct 1413570.3000 8.3095808155 original +4 1 aquarium encounter existent amuse fuse light headlock bumper adm 5475414.6500 25937.6752563004 original +5 1 123456789012 1234567890123456789012345 1234567890123456789012345 1234567890 12345678.1234 1234567890.0987654321 original +PREHOOK: query: alter table part_change_same_type_different_params replace columns (insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_same_type_different_params +PREHOOK: Output: default@part_change_same_type_different_params +POSTHOOK: query: alter table part_change_same_type_different_params replace columns (insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_same_type_different_params +POSTHOOK: Output: default@part_change_same_type_different_params +PREHOOK: query: CREATE TABLE same_type1_b_txt(insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING) +row format delimited fields terminated by '|' +stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@same_type1_b_txt +POSTHOOK: query: CREATE TABLE same_type1_b_txt(insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 
VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING) +row format delimited fields terminated by '|' +stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@same_type1_b_txt +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/same_type1_b.txt' overwrite into table same_type1_b_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@same_type1_b_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/same_type1_b.txt' overwrite into table same_type1_b_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@same_type1_b_txt +PREHOOK: query: insert into table part_change_same_type_different_params partition(part=1) select * from same_type1_b_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@same_type1_b_txt +PREHOOK: Output: default@part_change_same_type_different_params@part=1 +POSTHOOK: query: insert into table part_change_same_type_different_params partition(part=1) select * from same_type1_b_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@same_type1_b_txt +POSTHOOK: Output: default@part_change_same_type_different_params@part=1 +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).b SIMPLE [(same_type1_b_txt)same_type1_b_txt.FieldSchema(name:b, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c1 SIMPLE [(same_type1_b_txt)same_type1_b_txt.FieldSchema(name:c1, type:char(8), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c2 SIMPLE [(same_type1_b_txt)same_type1_b_txt.FieldSchema(name:c2, type:char(32), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c3 SIMPLE [(same_type1_b_txt)same_type1_b_txt.FieldSchema(name:c3, type:varchar(15), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c4 SIMPLE [(same_type1_b_txt)same_type1_b_txt.FieldSchema(name:c4, type:varchar(18), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c5 SIMPLE [(same_type1_b_txt)same_type1_b_txt.FieldSchema(name:c5, type:decimal(10,2), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c6 SIMPLE [(same_type1_b_txt)same_type1_b_txt.FieldSchema(name:c6, type:decimal(25,15), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).insert_num SIMPLE [(same_type1_b_txt)same_type1_b_txt.FieldSchema(name:insert_num, type:int, comment:null), ] +same_type1_b_txt.insert_num same_type1_b_txt.c1 same_type1_b_txt.c2 same_type1_b_txt.c3 same_type1_b_txt.c4 same_type1_b_txt.c5 same_type1_b_txt.c6 same_type1_b_txt.b +PREHOOK: query: CREATE TABLE same_type1_c_txt(insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING) +row format delimited fields terminated by '|' +stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@same_type1_c_txt +POSTHOOK: query: CREATE TABLE same_type1_c_txt(insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING) +row format delimited fields terminated by '|' +stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@same_type1_c_txt +PREHOOK: query: load data local inpath 
'../../data/files/schema_evolution/same_type1_c.txt' overwrite into table same_type1_c_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@same_type1_c_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/same_type1_c.txt' overwrite into table same_type1_c_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@same_type1_c_txt +PREHOOK: query: insert into table part_change_same_type_different_params partition(part=2) select * from same_type1_c_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@same_type1_c_txt +PREHOOK: Output: default@part_change_same_type_different_params@part=2 +POSTHOOK: query: insert into table part_change_same_type_different_params partition(part=2) select * from same_type1_c_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@same_type1_c_txt +POSTHOOK: Output: default@part_change_same_type_different_params@part=2 +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=2).b SIMPLE [(same_type1_c_txt)same_type1_c_txt.FieldSchema(name:b, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=2).c1 SIMPLE [(same_type1_c_txt)same_type1_c_txt.FieldSchema(name:c1, type:char(8), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=2).c2 SIMPLE [(same_type1_c_txt)same_type1_c_txt.FieldSchema(name:c2, type:char(32), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=2).c3 SIMPLE [(same_type1_c_txt)same_type1_c_txt.FieldSchema(name:c3, type:varchar(15), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=2).c4 SIMPLE [(same_type1_c_txt)same_type1_c_txt.FieldSchema(name:c4, type:varchar(18), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=2).c5 SIMPLE [(same_type1_c_txt)same_type1_c_txt.FieldSchema(name:c5, type:decimal(10,2), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=2).c6 SIMPLE [(same_type1_c_txt)same_type1_c_txt.FieldSchema(name:c6, type:decimal(25,15), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=2).insert_num SIMPLE [(same_type1_c_txt)same_type1_c_txt.FieldSchema(name:insert_num, type:int, comment:null), ] +same_type1_c_txt.insert_num same_type1_c_txt.c1 same_type1_c_txt.c2 same_type1_c_txt.c3 same_type1_c_txt.c4 same_type1_c_txt.c5 same_type1_c_txt.c6 same_type1_c_txt.b +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_same_type_different_params + Statistics: Num rows: 13 Data size: 9347 Basic stats: COMPLETE Column stats: PARTIAL + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:c1:char(8), 2:c2:char(32), 3:c3:varchar(15), 4:c4:varchar(18), 5:c5:decimal(10,2), 6:c6:decimal(25,15), 7:b:string, 8:part:int, 
9:ROW__ID:struct] + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: char(8)), c2 (type: char(32)), c3 (type: varchar(15)), c4 (type: varchar(18)), c5 (type: decimal(10,2)), c6 (type: decimal(25,15)), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 8, 1, 2, 3, 4, 5, 6, 7] + Statistics: Num rows: 13 Data size: 9347 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 13 Data size: 9347 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat), (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 8 + includeColumns: [0, 1, 2, 3, 4, 5, 6, 7] + dataColumns: insert_num:int, c1:char(8), c2:char(32), c3:varchar(15), c4:varchar(18), c5:decimal(10,2), c6:decimal(25,15), b:string + partitionColumnCount: 1 + partitionColumns: part:int + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_same_type_different_params +PREHOOK: Input: default@part_change_same_type_different_params@part=1 +PREHOOK: Input: default@part_change_same_type_different_params@part=2 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_same_type_different_params +POSTHOOK: Input: default@part_change_same_type_different_params@part=1 +POSTHOOK: Input: default@part_change_same_type_different_params@part=2 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 b +1 1 excess corporation believable gateway thumb repe 76855494.39 NULL original +10 2 dreamlan raster devastation association boa lubricant century 5830.99 NULL new +11 2 tiger however center propeller agoni shocking misshapen 6946533.80 31608910.030000000000000 new +12 2 bulgin extensive ambivalent rastled billion zeroes 943084.30 3090297719.717120349328271 new +13 2 12345678 12345678901234567890123456789012 123456789012345 123456789012345678 NULL 1234567890.543210987654321 new +2 1 mutation hideout authentic blankness sonic hover know 42093605.00 3182652.406680000000000 original +3 1 hibernat country detox coyotes ball pulse cand direct 1413570.30 8.309580815500000 original +4 1 aquarium encounter existent amuse fuse light head bumper adm 5475414.65 25937.675256300400000 original +5 1 12345678 1234567890123456789012345 123456789012345 1234567890 12345678.12 
1234567890.098765432100000 original +6 1 hollow innocent crabs blushing ambition ebony liquor ageless par NULL 3841833197.314137090000000 new +7 1 wig feel social fork drum search bump conclusion percept 8.31 NULL new +8 1 bubble f drain loyal station racket antique business 0.69 22.832613278741300 new +9 1 12345678 12345678901234567890123456789012 123456789012345 123456789012345678 NULL 1234567890.543210987654321 new +PREHOOK: query: drop table part_change_same_type_different_params +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_same_type_different_params +PREHOOK: Output: default@part_change_same_type_different_params +POSTHOOK: query: drop table part_change_same_type_different_params +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_same_type_different_params +POSTHOOK: Output: default@part_change_same_type_different_params diff --git ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_llap_io.q.out ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_llap_io.q.out new file mode 100644 index 0000000..f71908c --- /dev/null +++ ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_llap_io.q.out @@ -0,0 +1,1717 @@ +PREHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@schema_evolution_data +POSTHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@schema_evolution_data +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@schema_evolution_data +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@schema_evolution_data +PREHOOK: query: CREATE TABLE part_add_int_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_add_int_permute_select +POSTHOOK: query: CREATE TABLE part_add_int_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_add_int_permute_select +PREHOOK: query: insert into table part_add_int_permute_select 
partition(part=1) VALUES (1, 1111, 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_add_int_permute_select@part=1 +POSTHOOK: query: insert into table part_add_int_permute_select partition(part=1) VALUES (1, 1111, 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_add_int_permute_select@part=1 +POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).a SCRIPT [] +POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).insert_num SCRIPT [] +col1 col2 col3 +PREHOOK: query: alter table part_add_int_permute_select add columns(c int) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@part_add_int_permute_select +PREHOOK: Output: default@part_add_int_permute_select +POSTHOOK: query: alter table part_add_int_permute_select add columns(c int) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@part_add_int_permute_select +POSTHOOK: Output: default@part_add_int_permute_select +PREHOOK: query: insert into table part_add_int_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333) +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_add_int_permute_select@part=1 +POSTHOOK: query: insert into table part_add_int_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333) +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_add_int_permute_select@part=1 +POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).a SCRIPT [] +POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).c SCRIPT [] +POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).insert_num SCRIPT [] +col1 col2 col3 col4 +PREHOOK: query: explain vectorization detail +select insert_num,part,a,b from part_add_int_permute_select +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,a,b from part_add_int_permute_select +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_add_int_permute_select + Statistics: Num rows: 2 Data size: 198 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:a:int, 2:b:string, 3:c:int, 4:part:int, 5:ROW__ID:struct] + Select Operator + expressions: insert_num (type: int), part (type: int), a (type: int), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 4, 1, 2] + Statistics: Num rows: 2 Data size: 198 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 2 Data size: 198 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 4 + includeColumns: [0, 1, 2] + dataColumns: insert_num:int, a:int, b:string, c:int + partitionColumnCount: 1 + partitionColumns: part:int + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,a,b from part_add_int_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_permute_select +PREHOOK: Input: default@part_add_int_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b from part_add_int_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_permute_select +POSTHOOK: Input: default@part_add_int_permute_select@part=1 +#### A masked pattern was here #### +insert_num part a b +1 1 1111 new +2 1 2222 new +PREHOOK: query: select insert_num,part,a,b,c from part_add_int_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_permute_select +PREHOOK: Input: default@part_add_int_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b,c from part_add_int_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_permute_select +POSTHOOK: Input: default@part_add_int_permute_select@part=1 +#### A masked pattern was here #### +insert_num part a b c +1 1 1111 new NULL +2 1 2222 new 3333 +PREHOOK: query: select insert_num,part,c from part_add_int_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_permute_select +PREHOOK: Input: default@part_add_int_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c from part_add_int_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_permute_select +POSTHOOK: Input: default@part_add_int_permute_select@part=1 +#### A masked pattern was here #### +insert_num part c +1 1 NULL +2 1 3333 +PREHOOK: query: drop table part_add_int_permute_select +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_add_int_permute_select +PREHOOK: Output: default@part_add_int_permute_select +POSTHOOK: query: drop table part_add_int_permute_select +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_add_int_permute_select +POSTHOOK: Output: default@part_add_int_permute_select +PREHOOK: query: CREATE TABLE part_add_int_string_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_add_int_string_permute_select +POSTHOOK: query: CREATE TABLE part_add_int_string_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_add_int_string_permute_select +PREHOOK: query: insert into table part_add_int_string_permute_select partition(part=1) VALUES (1, 1111, 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: 
default@part_add_int_string_permute_select@part=1 +POSTHOOK: query: insert into table part_add_int_string_permute_select partition(part=1) VALUES (1, 1111, 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_add_int_string_permute_select@part=1 +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).a SCRIPT [] +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).insert_num SCRIPT [] +col1 col2 col3 +PREHOOK: query: alter table part_add_int_string_permute_select add columns(c int, d string) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Output: default@part_add_int_string_permute_select +POSTHOOK: query: alter table part_add_int_string_permute_select add columns(c int, d string) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Output: default@part_add_int_string_permute_select +PREHOOK: query: insert into table part_add_int_string_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333, '4444') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_add_int_string_permute_select@part=1 +POSTHOOK: query: insert into table part_add_int_string_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333, '4444') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_add_int_string_permute_select@part=1 +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).a SCRIPT [] +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).c SCRIPT [] +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).d SCRIPT [] +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).insert_num SCRIPT [] +col1 col2 col3 col4 col5 +PREHOOK: query: explain vectorization detail +select insert_num,part,a,b from part_add_int_string_permute_select +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,a,b from part_add_int_string_permute_select +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_add_int_string_permute_select + Statistics: Num rows: 2 Data size: 198 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:a:int, 2:b:string, 3:c:int, 4:d:string, 5:part:int, 6:ROW__ID:struct] + Select Operator + expressions: insert_num (type: int), part (type: int), a (type: int), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 5, 1, 2] + Statistics: Num rows: 2 Data size: 198 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 2 Data size: 198 Basic stats: COMPLETE Column stats: COMPLETE + table: + input 
format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 5 + includeColumns: [0, 1, 2] + dataColumns: insert_num:int, a:int, b:string, c:int, d:string + partitionColumnCount: 1 + partitionColumns: part:int + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,a,b from part_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b from part_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +insert_num part a b +1 1 1111 new +2 1 2222 new +PREHOOK: query: select insert_num,part,a,b,c from part_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b,c from part_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +insert_num part a b c +1 1 1111 new NULL +2 1 2222 new 3333 +PREHOOK: query: select insert_num,part,a,b,c,d from part_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b,c,d from part_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +insert_num part a b c d +1 1 1111 new NULL NULL +2 1 2222 new 3333 4444 +PREHOOK: query: select insert_num,part,a,c,d from part_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,c,d from part_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +insert_num part a c d +1 1 1111 NULL NULL +2 1 2222 3333 4444 +PREHOOK: query: select insert_num,part,a,d from part_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: 
default@part_add_int_string_permute_select +PREHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,d from part_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +insert_num part a d +1 1 1111 NULL +2 1 2222 4444 +PREHOOK: query: select insert_num,part,c from part_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c from part_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +insert_num part c +1 1 NULL +2 1 3333 +PREHOOK: query: select insert_num,part,d from part_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,d from part_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +insert_num part d +1 1 NULL +2 1 4444 +PREHOOK: query: drop table part_add_int_string_permute_select +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Output: default@part_add_int_string_permute_select +POSTHOOK: query: drop table part_add_int_string_permute_select +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Output: default@part_add_int_string_permute_select +PREHOOK: query: CREATE TABLE part_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_string_group_double +POSTHOOK: query: CREATE TABLE part_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_string_group_double +PREHOOK: query: insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_string_group_double@part=1 +POSTHOOK: query: insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_string_group_double@part=1 +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_double 
PARTITION(part=1).c2 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).c3 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +_col0 _col1 _col2 _col3 _col4 +PREHOOK: query: alter table part_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_string_group_double +PREHOOK: Output: default@part_change_string_group_double +POSTHOOK: query: alter table part_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_string_group_double +POSTHOOK: Output: default@part_change_string_group_double +PREHOOK: query: insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double1, double1, double1, 'new' FROM schema_evolution_data WHERE insert_num = 111 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_string_group_double@part=1 +POSTHOOK: query: insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double1, double1, double1, 'new' FROM schema_evolution_data WHERE insert_num = 111 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_string_group_double@part=1 +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num double1 double1 double1 _c4 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,b from part_change_string_group_double +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,b from part_change_string_group_double +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_string_group_double + Statistics: Num rows: 5 Data size: 500 Basic stats: COMPLETE Column stats: PARTIAL + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:c1:double, 2:c2:double, 3:c3:double, 4:b:string, 5:part:int, 6:ROW__ID:struct] + Select 
Operator + expressions: insert_num (type: int), part (type: int), c1 (type: double), c2 (type: double), c3 (type: double), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 5, 1, 2, 3, 4] + Statistics: Num rows: 5 Data size: 500 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 5 Data size: 500 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 5 + includeColumns: [0, 1, 2, 3, 4] + dataColumns: insert_num:int, c1:double, c2:double, c3:double, b:string + partitionColumnCount: 1 + partitionColumns: part:int + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_string_group_double +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_string_group_double +PREHOOK: Input: default@part_change_string_group_double@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_string_group_double +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_string_group_double +POSTHOOK: Input: default@part_change_string_group_double@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 b +101 1 1.7976931348623157E308 1.7976931348623157E308 1.7976931348623157E308 original +102 1 -1.7976931348623157E308 -1.7976931348623157E308 -1.7976931348623157E308 original +103 1 NULL NULL NULL original +104 1 30.774 30.774 30.774 original +105 1 46114.28 46114.28 46114.28 original +PREHOOK: query: drop table part_change_string_group_double +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_string_group_double +PREHOOK: Output: default@part_change_string_group_double +POSTHOOK: query: drop table part_change_string_group_double +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_string_group_double +POSTHOOK: Output: default@part_change_string_group_double +PREHOOK: query: CREATE TABLE part_change_date_group_string_group_date_timestamp(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_date_group_string_group_date_timestamp +POSTHOOK: query: CREATE TABLE part_change_date_group_string_group_date_timestamp(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: 
database:default +POSTHOOK: Output: default@part_change_date_group_string_group_date_timestamp +PREHOOK: query: insert into table part_change_date_group_string_group_date_timestamp partition(part=1) SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_date_group_string_group_date_timestamp@part=1 +POSTHOOK: query: insert into table part_change_date_group_string_group_date_timestamp partition(part=1) SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_date_group_string_group_date_timestamp@part=1 +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num date1 date1 date1 date1 date1 timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 _c11 +PREHOOK: query: alter table part_change_date_group_string_group_date_timestamp replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), 
c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_date_group_string_group_date_timestamp +PREHOOK: Output: default@part_change_date_group_string_group_date_timestamp +POSTHOOK: query: alter table part_change_date_group_string_group_date_timestamp replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp +POSTHOOK: Output: default@part_change_date_group_string_group_date_timestamp +PREHOOK: query: insert into table part_change_date_group_string_group_date_timestamp partition(part=1) VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_change_date_group_string_group_date_timestamp@part=1 +POSTHOOK: query: insert into table part_change_date_group_string_group_date_timestamp partition(part=1) VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_change_date_group_string_group_date_timestamp@part=1 +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c1 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c10 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c2 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c3 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c4 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c5 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c6 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c7 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c8 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c9 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_date_group_string_group_date_timestamp + Statistics: Num rows: 6 Data 
size: 12449 Basic stats: COMPLETE Column stats: PARTIAL + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:c1:string, 2:c2:char(50), 3:c3:char(15), 4:c4:varchar(50), 5:c5:varchar(15), 6:c6:string, 7:c7:char(50), 8:c8:char(15), 9:c9:varchar(50), 10:c10:varchar(15), 11:b:string, 12:part:int, 13:ROW__ID:struct] + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: string), c2 (type: char(50)), c3 (type: char(15)), c4 (type: varchar(50)), c5 (type: varchar(15)), c6 (type: string), c7 (type: char(50)), c8 (type: char(15)), c9 (type: varchar(50)), c10 (type: varchar(15)), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 12, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] + Statistics: Num rows: 6 Data size: 8952 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 6 Data size: 8952 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 12 + includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] + dataColumns: insert_num:int, c1:string, c2:char(50), c3:char(15), c4:varchar(50), c5:varchar(15), c6:string, c7:char(50), c8:char(15), c9:varchar(50), c10:varchar(15), b:string + partitionColumnCount: 1 + partitionColumns: part:int + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_date_group_string_group_date_timestamp +PREHOOK: Input: default@part_change_date_group_string_group_date_timestamp@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp +POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b +101 1 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.970117179 6229-06-28 02:5 6229-06-28 02:54:28.970117179 6229-06-28 02:5 original +102 1 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:3 5966-07-09 03:30:50.597 5966-07-09 03:3 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 2021-09-24 2021-09-24 
2021-09-24 2021-09-24 2021-09-24 1978-08-02 06:34:14.0 1978-08-02 06:34:14.0 1978-08-02 06:3 1978-08-02 06:34:14.0 1978-08-02 06:3 original +105 1 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036854 1991-01-06 16:2 1991-01-06 16:20:39.72036854 1991-01-06 16:2 original +111 1 filler filler filler filler filler filler filler filler filler filler new +PREHOOK: query: drop table part_change_date_group_string_group_date_timestamp +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_date_group_string_group_date_timestamp +PREHOOK: Output: default@part_change_date_group_string_group_date_timestamp +POSTHOOK: query: drop table part_change_date_group_string_group_date_timestamp +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp +POSTHOOK: Output: default@part_change_date_group_string_group_date_timestamp +PREHOOK: query: CREATE TABLE part_change_numeric_group_string_group_multi_ints_string_group(insert_num int, + c1 tinyint, c2 smallint, c3 int, c4 bigint, + c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint, + c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint, + b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: query: CREATE TABLE part_change_numeric_group_string_group_multi_ints_string_group(insert_num int, + c1 tinyint, c2 smallint, c3 int, c4 bigint, + c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint, + c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint, + b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: query: insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) SELECT insert_num, + tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +POSTHOOK: query: insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) SELECT insert_num, + tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c10 SIMPLE 
[(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c16 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c17 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c18 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c19 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c20 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: 
part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 _c21 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 b +101 1 -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL original +102 1 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 203332 888888857923222 original +105 1 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 original +PREHOOK: query: alter table part_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, c4 STRING, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5), + c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5), + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: query: alter table part_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, c4 STRING, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5), + c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 
VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5), + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: query: insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +POSTHOOK: query: insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c1 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c10 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c11 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c12 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c13 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c14 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c15 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c16 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c17 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c18 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c19 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c2 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c20 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c3 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c4 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c5 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c6 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c7 SCRIPT [] +POSTHOOK: Lineage: 
part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c8 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c9 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 _col17 _col18 _col19 _col20 _col21 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_numeric_group_string_group_multi_ints_string_group + Statistics: Num rows: 6 Data size: 1140 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:c1:string, 2:c2:string, 3:c3:string, 4:c4:string, 5:c5:char(50), 6:c6:char(50), 7:c7:char(50), 8:c8:char(50), 9:c9:char(5), 10:c10:char(5), 11:c11:char(5), 12:c12:char(5), 13:c13:varchar(50), 14:c14:varchar(50), 15:c15:varchar(50), 16:c16:varchar(50), 17:c17:varchar(5), 18:c18:varchar(5), 19:c19:varchar(5), 20:c20:varchar(5), 21:b:string, 22:part:int, 23:ROW__ID:struct] + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: string), c2 (type: string), c3 (type: string), c4 (type: string), c5 (type: char(50)), c6 (type: char(50)), c7 (type: char(50)), c8 (type: char(50)), c9 (type: char(5)), c10 (type: char(5)), c11 (type: char(5)), c12 (type: char(5)), c13 (type: varchar(50)), c14 (type: varchar(50)), c15 (type: varchar(50)), c16 (type: varchar(50)), c17 (type: varchar(5)), c18 (type: varchar(5)), c19 (type: varchar(5)), c20 (type: varchar(5)), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 22, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21] + Statistics: Num rows: 6 Data size: 1140 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 6 Data size: 1140 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + 
inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 22 + includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21] + dataColumns: insert_num:int, c1:string, c2:string, c3:string, c4:string, c5:char(50), c6:char(50), c7:char(50), c8:char(50), c9:char(5), c10:char(5), c11:char(5), c12:char(5), c13:varchar(50), c14:varchar(50), c15:varchar(50), c16:varchar(50), c17:varchar(5), c18:varchar(5), c19:varchar(5), c20:varchar(5), b:string + partitionColumnCount: 1 + partitionColumns: part:int + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 b +101 1 -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147 NULL -128 NULL -2147483648 NULL -128 NULL -2147 NULL original +102 1 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 21474 92233 127 32767 2147483647 9223372036854775807 127 32767 21474 92233 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 20333 88888 23 834 203332 888888857923222 23 834 20333 88888 original +105 1 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -2830 -9999 -2222 -99 -28300 -999992 -222282153733 -99 -2830 -9999 -2222 original +111 1 filler filler filler filler filler filler filler filler fille fille fille fille filler filler filler filler fille fille fille fille new +PREHOOK: query: drop table part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: query: drop table part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: query: CREATE TABLE part_change_numeric_group_string_group_floating_string_group(insert_num int, + c1 decimal(38,18), c2 float, c3 double, + c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double, + c10 decimal(38,18), c11 float, c12 double, 
c13 decimal(38,18), c14 float, c15 double, + b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: query: CREATE TABLE part_change_numeric_group_string_group_floating_string_group(insert_num int, + c1 decimal(38,18), c2 float, c3 double, + c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double, + c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double, + b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: query: insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) SELECT insert_num, + decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group@part=1 +POSTHOOK: query: insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) SELECT insert_num, + decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group@part=1 +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: 
part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num decimal1 float1 double1 decimal1 float1 double1 decimal1 float1 double1 decimal1 float1 double1 decimal1 float1 double1 _c16 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b +101 1 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 original +102 1 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original 
+104 1 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 original +105 1 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 original +PREHOOK: query: alter table part_change_numeric_group_string_group_floating_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, + c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7), + c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7), + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: query: alter table part_change_numeric_group_string_group_floating_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, + c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7), + c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7), + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: query: insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group@part=1 +POSTHOOK: query: insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group@part=1 +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c1 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c10 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c11 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c12 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c13 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c14 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c15 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c2 SCRIPT [] +POSTHOOK: Lineage: 
part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c3 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c4 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c5 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c6 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c7 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c8 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c9 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_numeric_group_string_group_floating_string_group + Statistics: Num rows: 6 Data size: 3300 Basic stats: COMPLETE Column stats: PARTIAL + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:c1:string, 2:c2:string, 3:c3:string, 4:c4:char(50), 5:c5:char(50), 6:c6:char(50), 7:c7:char(7), 8:c8:char(7), 9:c9:char(7), 10:c10:varchar(50), 11:c11:varchar(50), 12:c12:varchar(50), 13:c13:varchar(7), 14:c14:varchar(7), 15:c15:varchar(7), 16:b:string, 17:part:int, 18:ROW__ID:struct] + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: string), c2 (type: string), c3 (type: string), c4 (type: char(50)), c5 (type: char(50)), c6 (type: char(50)), c7 (type: char(7)), c8 (type: char(7)), c9 (type: char(7)), c10 (type: varchar(50)), c11 (type: varchar(50)), c12 (type: varchar(50)), c13 (type: varchar(7)), c14 (type: varchar(7)), c15 (type: varchar(7)), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 17, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] + Statistics: Num rows: 6 Data size: 3300 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 6 Data size: 3300 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, 
llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 17 + includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] + dataColumns: insert_num:int, c1:string, c2:string, c3:string, c4:char(50), c5:char(50), c6:char(50), c7:char(7), c8:char(7), c9:char(7), c10:varchar(50), c11:varchar(50), c12:varchar(50), c13:varchar(7), c14:varchar(7), c15:varchar(7), b:string + partitionColumnCount: 1 + partitionColumns: part:int + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b +101 1 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 original +102 1 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 66475.561431 -100.35978 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 original +105 1 9250340.75 NULL 46114.28 9250340.75 NULL 46114.28 9250340 NULL 46114.2 9250340.75 NULL 46114.28 9250340 NULL 46114.2 original +111 1 filler filler filler filler filler filler filler filler filler filler filler filler filler filler filler new +PREHOOK: query: drop table part_change_numeric_group_string_group_floating_string_group +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: query: drop table part_change_numeric_group_string_group_floating_string_group +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: query: CREATE TABLE 
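-- A minimal sketch of the conversion pattern exercised above (illustrative
-- table name, not part of the suite): REPLACE COLUMNS retypes existing numeric
-- columns into the string group, and the old ORC partition data is converted
-- to text on read, with CHAR(n)/VARCHAR(n) truncating to n characters, which
-- is why the result above shows '9999999' and 'Infinit' in the CHAR(7)
-- columns. STORED AS ORC makes explicit what the suite gets from its defaults.
CREATE TABLE numeric_to_string_sketch (n decimal(38,18)) STORED AS ORC;
INSERT INTO numeric_to_string_sketch VALUES (99999999999999999999.999999999999999999);
ALTER TABLE numeric_to_string_sketch REPLACE COLUMNS (n CHAR(7));
SELECT n FROM numeric_to_string_sketch;  -- '9999999': rendered as text, then truncated to 7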
part_change_string_group_string_group_string(insert_num int, + c1 string, c2 string, c3 string, c4 string, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), + c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_string_group_string_group_string +POSTHOOK: query: CREATE TABLE part_change_string_group_string_group_string(insert_num int, + c1 string, c2 string, c3 string, c4 string, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), + c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_string_group_string_group_string +PREHOOK: query: insert into table part_change_string_group_string_group_string partition(part=1) SELECT insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_string_group_string_group_string@part=1 +POSTHOOK: query: insert into table part_change_string_group_string_group_string partition(part=1) SELECT insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_string_group_string_group_string@part=1 +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c10 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c5 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c6 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c7 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c8 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c9 EXPRESSION 
[(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,b from part_change_string_group_string_group_string +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_string_group_string_group_string +PREHOOK: Input: default@part_change_string_group_string_group_string@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,b from part_change_string_group_string_group_string +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_string_group_string_group_string +POSTHOOK: Input: default@part_change_string_group_string_group_string@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 b +101 1 freckled freckled freckled freckled original +102 1 ox ox ox ox original +103 1 original +104 1 I cooked I cooked I cooked I cooked original +105 1 200 200 200 200 original +PREHOOK: query: alter table part_change_string_group_string_group_string replace columns (insert_num int, + c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9), + c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING, + c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_string_group_string_group_string +PREHOOK: Output: default@part_change_string_group_string_group_string +POSTHOOK: query: alter table part_change_string_group_string_group_string replace columns (insert_num int, + c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9), + c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING, + c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_string_group_string_group_string +POSTHOOK: Output: default@part_change_string_group_string_group_string +PREHOOK: query: insert into table part_change_string_group_string_group_string partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_change_string_group_string_group_string@part=1 +POSTHOOK: query: insert into table part_change_string_group_string_group_string partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_change_string_group_string_group_string@part=1 +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c1 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c10 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c2 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c3 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c4 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c5 SCRIPT [] +POSTHOOK: Lineage: 
part_change_string_group_string_group_string PARTITION(part=1).c6 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c7 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c8 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c9 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_string_group_string_group_string + Statistics: Num rows: 6 Data size: 2712 Basic stats: COMPLETE Column stats: PARTIAL + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:c1:char(50), 2:c2:char(9), 3:c3:varchar(50), 4:c4:char(9), 5:c5:varchar(50), 6:c6:varchar(9), 7:c7:string, 8:c8:char(50), 9:c9:char(9), 10:c10:string, 11:b:string, 12:part:int, 13:ROW__ID:struct] + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: char(50)), c2 (type: char(9)), c3 (type: varchar(50)), c4 (type: char(9)), c5 (type: varchar(50)), c6 (type: varchar(9)), c7 (type: string), c8 (type: char(50)), c9 (type: char(9)), c10 (type: string), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 12, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] + Statistics: Num rows: 6 Data size: 2712 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 6 Data size: 2712 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 12 + includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] + dataColumns: insert_num:int, c1:char(50), c2:char(9), c3:varchar(50), c4:char(9), c5:varchar(50), c6:varchar(9), c7:string, c8:char(50), c9:char(9), c10:string, b:string + partitionColumnCount: 1 + partitionColumns: part:int + scratchColumnTypeNames: [] + + 
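-- The "enabledConditionsMet" lines above come from the q-file preamble these
-- schema-evolution tests share. A minimal sketch of the relevant settings,
-- as an assumption about that preamble: the SET statements themselves are
-- outside this excerpt, and hive.llap.io.enabled is inferred from the
-- "LLAP IO: all inputs" marker rather than shown in the diff.
SET hive.vectorized.execution.enabled=true;
SET hive.vectorized.use.vectorized.input.format=true;
SET hive.llap.io.enabled=true;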
Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_string_group_string_group_string +PREHOOK: Input: default@part_change_string_group_string_group_string@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_string_group_string_group_string +POSTHOOK: Input: default@part_change_string_group_string_group_string@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b +101 1 freckled freckled freckled freckled freckled freckled freckled freckled freckled freckled original +102 1 ox ox ox ox ox ox ox ox ox ox original +103 1 original +104 1 I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked original +105 1 200 200 200 200 200 200 200 200 200 200 original +111 1 filler filler filler filler filler filler filler filler filler filler new +PREHOOK: query: drop table part_change_string_group_string_group_string +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_string_group_string_group_string +PREHOOK: Output: default@part_change_string_group_string_group_string +POSTHOOK: query: drop table part_change_string_group_string_group_string +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_string_group_string_group_string +POSTHOOK: Output: default@part_change_string_group_string_group_string +PREHOOK: query: CREATE TABLE part_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int, + c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, + c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint, + c12 int, c13 int, c14 int, c15 int, + c16 bigint, c17 bigint, c18 bigint, + b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: CREATE TABLE part_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int, + c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, + c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint, + c12 int, c13 int, c14 int, c15 int, + c16 bigint, c17 bigint, c18 bigint, + b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: query: insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +POSTHOOK: query: insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM 
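-- Retyping within the string group is lossless for values that fit the new
-- length, as the identical 'freckled'/'ox'/'I cooked' rows above show. A
-- minimal sketch (illustrative table name, not from the suite):
CREATE TABLE string_group_sketch (c string) STORED AS ORC;
INSERT INTO string_group_sketch VALUES ('freckled');
ALTER TABLE string_group_sketch REPLACE COLUMNS (c CHAR(9));
SELECT c FROM string_group_sketch;  -- 'freckled', read back through the CHAR(9) schema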
schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c16 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c17 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c18 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c7 
SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 smallint1 smallint1 smallint1 smallint1 smallint1 int1 int1 int1 int1 bigint1 bigint1 bigint1 _c19 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 b +101 1 -128 -128 -128 -128 -128 -128 NULL NULL NULL NULL NULL -2147483648 -2147483648 -2147483648 -2147483648 NULL NULL NULL original +102 1 127 127 127 127 127 127 32767 32767 32767 32767 32767 2147483647 2147483647 2147483647 2147483647 9223372036854775807 9223372036854775807 9223372036854775807 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 23 23 23 23 23 23 834 834 834 834 834 203332 203332 203332 203332 888888857923222 888888857923222 888888857923222 original +105 1 -99 -99 -99 -99 -99 -99 -28300 -28300 -28300 -28300 -28300 -999992 -999992 -999992 -999992 -222282153733 -222282153733 -222282153733 original +PREHOOK: query: alter table part_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: alter table part_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: 
default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: query: insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +POSTHOOK: query: insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c1 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c10 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c11 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c12 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c13 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c14 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c15 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c16 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c17 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c18 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c2 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c3 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c4 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c5 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c6 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c7 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c8 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c9 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 _col17 _col18 _col19 +PREHOOK: query: explain vectorization detail +select 
insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_lower_to_higher_numeric_group_tinyint_to_bigint + Statistics: Num rows: 6 Data size: 1060 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:c1:smallint, 2:c2:int, 3:c3:bigint, 4:c4:decimal(38,18), 5:c5:float, 6:c6:double, 7:c7:int, 8:c8:bigint, 9:c9:decimal(38,18), 10:c10:float, 11:c11:double, 12:c12:bigint, 13:c13:decimal(38,18), 14:c14:float, 15:c15:double, 16:c16:decimal(38,18), 17:c17:float, 18:c18:double, 19:b:string, 20:part:int, 21:ROW__ID:struct] + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: smallint), c2 (type: int), c3 (type: bigint), c4 (type: decimal(38,18)), c5 (type: float), c6 (type: double), c7 (type: int), c8 (type: bigint), c9 (type: decimal(38,18)), c10 (type: float), c11 (type: double), c12 (type: bigint), c13 (type: decimal(38,18)), c14 (type: float), c15 (type: double), c16 (type: decimal(38,18)), c17 (type: float), c18 (type: double), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 20, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19] + Statistics: Num rows: 6 Data size: 1060 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 6 Data size: 1060 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 20 + includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19] + dataColumns: insert_num:int, c1:smallint, c2:int, c3:bigint, c4:decimal(38,18), c5:float, c6:double, c7:int, c8:bigint, c9:decimal(38,18), c10:float, c11:double, c12:bigint, c13:decimal(38,18), c14:float, c15:double, c16:decimal(38,18), c17:float, c18:double, b:string + partitionColumnCount: 1 + partitionColumns: part:int + 
scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 b +101 1 -128 -128 -128 -128.000000000000000000 -128.0 -128.0 NULL NULL NULL NULL NULL -2147483648 -2147483648.000000000000000000 -2.14748365E9 -2.147483648E9 NULL NULL NULL original +102 1 127 127 127 127.000000000000000000 127.0 127.0 32767 32767 32767.000000000000000000 32767.0 32767.0 2147483647 2147483647.000000000000000000 2.14748365E9 2.147483647E9 9223372036854775807.000000000000000000 9.223372E18 9.223372036854776E18 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 23 23 23 23.000000000000000000 23.0 23.0 834 834 834.000000000000000000 834.0 834.0 203332 203332.000000000000000000 203332.0 203332.0 888888857923222.000000000000000000 8.8888885E14 8.88888857923222E14 original +105 1 -99 -99 -99 -99.000000000000000000 -99.0 -99.0 -28300 -28300 -28300.000000000000000000 -28300.0 -28300.0 -999992 -999992.000000000000000000 -999992.0 -999992.0 -222282153733.000000000000000000 -2.22282154E11 -2.22282153733E11 original +111 1 7000 80000 90000000 1234.567800000000000000 9876.543 789.321 80000 90000000 1234.567800000000000000 9876.543 789.321 90000000 1234.567800000000000000 9876.543 789.321 1234.567800000000000000 9876.543 789.321 new +PREHOOK: query: drop table part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: drop table part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: query: CREATE TABLE part_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: CREATE TABLE part_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: query: insert into table 
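-- Widening within the numeric group is the safe direction: each stored value
-- is promoted on read, e.g. the tinyint 23 surfacing above as 23,
-- 23.000000000000000000 and 23.0 through the INT, decimal(38,18) and FLOAT
-- schemas. Minimal sketch (illustrative table name):
CREATE TABLE widen_sketch (v tinyint) STORED AS ORC;
INSERT INTO widen_sketch VALUES (23);
ALTER TABLE widen_sketch REPLACE COLUMNS (v bigint);
SELECT v FROM widen_sketch;  -- 23, now read as bigint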
part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) SELECT insert_num, + decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +POSTHOOK: query: insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) SELECT insert_num, + decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num decimal1 decimal1 float1 _c4 +PREHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 b +101 1 99999999999999999999.999999999999999999 99999999999999999999.999999999999999999 Infinity original +102 1 -99999999999999999999.999999999999999999 -99999999999999999999.999999999999999999 -Infinity original +103 1 NULL NULL NULL original +104 1 66475.561431000000000000 66475.561431000000000000 -100.35978 original +105 1 9250340.750000000000000000 9250340.750000000000000000 NULL original +PREHOOK: query: alter table part_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: alter table part_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Output: 
default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: query: insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +POSTHOOK: query: insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c1 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c2 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c3 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_lower_to_higher_numeric_group_decimal_to_float + Statistics: Num rows: 6 Data size: 648 Basic stats: COMPLETE Column stats: PARTIAL + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:c1:float, 2:c2:double, 3:c3:double, 4:b:string, 5:part:int, 6:ROW__ID:struct] + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: float), c2 (type: double), c3 (type: double), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 5, 1, 2, 3, 4] + Statistics: Num rows: 6 Data size: 648 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 6 Data size: 648 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 5 + includeColumns: [0, 1, 2, 3, 4] + dataColumns: insert_num:int, c1:float, c2:double, c3:double, b:string + partitionColumnCount: 1 + partitionColumns: part:int + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 b +101 1 1.0E20 1.0E20 Infinity original +102 1 -1.0E20 -1.0E20 -Infinity original +103 1 NULL NULL NULL original +104 1 66475.56 66475.561431 -100.35978 original +105 1 9250341.0 9250340.75 NULL original +111 1 1234.5677 9876.543 1234.5678 new +PREHOOK: query: drop table part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: drop table part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float diff --git ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table.q.out ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table.q.out index 4329bfe..4f7c2bf 100644 --- ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table.q.out +++ ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table.q.out @@ -109,9 +109,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 4 @@ -273,9 +274,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 5 @@ -499,9 +501,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: 
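-- decimal(38,18) to FLOAT/DOUBLE also reads as a promotion, but a lossy one:
-- in the result above, 66475.561431 comes back as 66475.56 through FLOAT and
-- 66475.561431 through DOUBLE, and the decimal extremes collapse to 1.0E20.
-- Minimal sketch (illustrative table name):
CREATE TABLE dec_to_float_sketch (v decimal(38,18)) STORED AS ORC;
INSERT INTO dec_to_float_sketch VALUES (66475.561431);
ALTER TABLE dec_to_float_sketch REPLACE COLUMNS (v float);
SELECT v FROM dec_to_float_sketch;  -- 66475.56: float cannot hold all the digits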
[org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 5 @@ -796,9 +799,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 22 @@ -1002,9 +1006,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 17 diff --git ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table_llap_io.q.out ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table_llap_io.q.out new file mode 100644 index 0000000..00c484c --- /dev/null +++ ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table_llap_io.q.out @@ -0,0 +1,1421 @@ +PREHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@schema_evolution_data +POSTHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@schema_evolution_data +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@schema_evolution_data +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@schema_evolution_data +PREHOOK: query: CREATE TABLE table_add_int_permute_select(insert_num int, a INT, b STRING) +PREHOOK: type: CREATETABLE +PREHOOK: Output: 
database:default +PREHOOK: Output: default@table_add_int_permute_select +POSTHOOK: query: CREATE TABLE table_add_int_permute_select(insert_num int, a INT, b STRING) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table_add_int_permute_select +PREHOOK: query: insert into table table_add_int_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table_add_int_permute_select +POSTHOOK: query: insert into table table_add_int_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_add_int_permute_select +POSTHOOK: Lineage: table_add_int_permute_select.a SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_add_int_permute_select.b SIMPLE [] +POSTHOOK: Lineage: table_add_int_permute_select.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 +PREHOOK: query: alter table table_add_int_permute_select add columns(c int) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@table_add_int_permute_select +PREHOOK: Output: default@table_add_int_permute_select +POSTHOOK: query: alter table table_add_int_permute_select add columns(c int) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@table_add_int_permute_select +POSTHOOK: Output: default@table_add_int_permute_select +PREHOOK: query: insert into table table_add_int_permute_select VALUES (111, 80000, 'new', 80000) +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@table_add_int_permute_select +POSTHOOK: query: insert into table table_add_int_permute_select VALUES (111, 80000, 'new', 80000) +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@table_add_int_permute_select +POSTHOOK: Lineage: table_add_int_permute_select.a SCRIPT [] +POSTHOOK: Lineage: table_add_int_permute_select.b SCRIPT [] +POSTHOOK: Lineage: table_add_int_permute_select.c SCRIPT [] +POSTHOOK: Lineage: table_add_int_permute_select.insert_num SCRIPT [] +col1 col2 col3 col4 +PREHOOK: query: explain vectorization detail +select insert_num,a,b from table_add_int_permute_select +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,a,b from table_add_int_permute_select +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: table_add_int_permute_select + Statistics: Num rows: 6 Data size: 600 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:a:int, 2:b:string, 3:c:int, 4:ROW__ID:struct] + Select Operator + expressions: insert_num (type: int), a (type: int), b (type: string) + outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2] + Statistics: Num rows: 6 Data size: 600 Basic stats: COMPLETE Column stats: COMPLETE + File Output 
Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 6 Data size: 600 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 4 + includeColumns: [0, 1, 2] + dataColumns: insert_num:int, a:int, b:string, c:int + partitionColumnCount: 0 + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,a,b from table_add_int_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@table_add_int_permute_select +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,a,b from table_add_int_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_add_int_permute_select +#### A masked pattern was here #### +insert_num a b +101 -2147483648 original +102 2147483647 original +103 NULL original +104 203332 original +105 -999992 original +111 80000 new +PREHOOK: query: select insert_num,a,b,c from table_add_int_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@table_add_int_permute_select +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,a,b,c from table_add_int_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_add_int_permute_select +#### A masked pattern was here #### +insert_num a b c +101 -2147483648 original NULL +102 2147483647 original NULL +103 NULL original NULL +104 203332 original NULL +105 -999992 original NULL +111 80000 new 80000 +PREHOOK: query: select insert_num,c from table_add_int_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@table_add_int_permute_select +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c from table_add_int_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_add_int_permute_select +#### A masked pattern was here #### +insert_num c +101 NULL +102 NULL +103 NULL +104 NULL +105 NULL +111 80000 +PREHOOK: query: drop table table_add_int_permute_select +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table_add_int_permute_select +PREHOOK: Output: default@table_add_int_permute_select +POSTHOOK: query: drop table table_add_int_permute_select +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table_add_int_permute_select +POSTHOOK: Output: default@table_add_int_permute_select +PREHOOK: query: CREATE TABLE table_add_int_string_permute_select(insert_num int, a INT, b STRING) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table_add_int_string_permute_select +POSTHOOK: query: CREATE TABLE table_add_int_string_permute_select(insert_num int, a INT, b STRING) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table_add_int_string_permute_select +PREHOOK: query: insert into table 
table_add_int_string_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table_add_int_string_permute_select +POSTHOOK: query: insert into table table_add_int_string_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_add_int_string_permute_select +POSTHOOK: Lineage: table_add_int_string_permute_select.a SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_add_int_string_permute_select.b SIMPLE [] +POSTHOOK: Lineage: table_add_int_string_permute_select.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num int1 _c2 +PREHOOK: query: alter table table_add_int_string_permute_select add columns(c int, d string) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@table_add_int_string_permute_select +PREHOOK: Output: default@table_add_int_string_permute_select +POSTHOOK: query: alter table table_add_int_string_permute_select add columns(c int, d string) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@table_add_int_string_permute_select +POSTHOOK: Output: default@table_add_int_string_permute_select +PREHOOK: query: insert into table table_add_int_string_permute_select VALUES (111, 80000, 'new', 80000, 'filler') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@table_add_int_string_permute_select +POSTHOOK: query: insert into table table_add_int_string_permute_select VALUES (111, 80000, 'new', 80000, 'filler') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@table_add_int_string_permute_select +POSTHOOK: Lineage: table_add_int_string_permute_select.a SCRIPT [] +POSTHOOK: Lineage: table_add_int_string_permute_select.b SCRIPT [] +POSTHOOK: Lineage: table_add_int_string_permute_select.c SCRIPT [] +POSTHOOK: Lineage: table_add_int_string_permute_select.d SCRIPT [] +POSTHOOK: Lineage: table_add_int_string_permute_select.insert_num SCRIPT [] +col1 col2 col3 col4 col5 +PREHOOK: query: explain vectorization detail +select insert_num,a,b from table_add_int_string_permute_select +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,a,b from table_add_int_string_permute_select +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: table_add_int_string_permute_select + Statistics: Num rows: 6 Data size: 600 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:a:int, 2:b:string, 3:c:int, 4:d:string, 5:ROW__ID:struct] + Select Operator + expressions: insert_num (type: int), a (type: int), b (type: string) + outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2] + Statistics: Num rows: 6 Data size: 600 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + 
File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 6 Data size: 600 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 5 + includeColumns: [0, 1, 2] + dataColumns: insert_num:int, a:int, b:string, c:int, d:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,a,b from table_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@table_add_int_string_permute_select +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,a,b from table_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_add_int_string_permute_select +#### A masked pattern was here #### +insert_num a b +101 -2147483648 original +102 2147483647 original +103 NULL original +104 203332 original +105 -999992 original +111 80000 new +PREHOOK: query: select insert_num,a,b,c from table_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@table_add_int_string_permute_select +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,a,b,c from table_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_add_int_string_permute_select +#### A masked pattern was here #### +insert_num a b c +101 -2147483648 original NULL +102 2147483647 original NULL +103 NULL original NULL +104 203332 original NULL +105 -999992 original NULL +111 80000 new 80000 +PREHOOK: query: select insert_num,a,b,c,d from table_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@table_add_int_string_permute_select +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,a,b,c,d from table_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_add_int_string_permute_select +#### A masked pattern was here #### +insert_num a b c d +101 -2147483648 original NULL NULL +102 2147483647 original NULL NULL +103 NULL original NULL NULL +104 203332 original NULL NULL +105 -999992 original NULL NULL +111 80000 new 80000 filler +PREHOOK: query: select insert_num,a,c,d from table_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@table_add_int_string_permute_select +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,a,c,d from table_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_add_int_string_permute_select +#### A masked pattern was here #### +insert_num a c d +101 -2147483648 NULL NULL +102 2147483647 NULL NULL +103 NULL NULL NULL +104 203332 NULL NULL +105 -999992 NULL NULL +111 80000 80000 filler +PREHOOK: query: select insert_num,a,d from table_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: 
default@table_add_int_string_permute_select +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,a,d from table_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_add_int_string_permute_select +#### A masked pattern was here #### +insert_num a d +101 -2147483648 NULL +102 2147483647 NULL +103 NULL NULL +104 203332 NULL +105 -999992 NULL +111 80000 filler +PREHOOK: query: select insert_num,c from table_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@table_add_int_string_permute_select +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c from table_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_add_int_string_permute_select +#### A masked pattern was here #### +insert_num c +101 NULL +102 NULL +103 NULL +104 NULL +105 NULL +111 80000 +PREHOOK: query: select insert_num,d from table_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@table_add_int_string_permute_select +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,d from table_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_add_int_string_permute_select +#### A masked pattern was here #### +insert_num d +101 NULL +102 NULL +103 NULL +104 NULL +105 NULL +111 filler +PREHOOK: query: drop table table_add_int_string_permute_select +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table_add_int_string_permute_select +PREHOOK: Output: default@table_add_int_string_permute_select +POSTHOOK: query: drop table table_add_int_string_permute_select +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table_add_int_string_permute_select +POSTHOOK: Output: default@table_add_int_string_permute_select +PREHOOK: query: CREATE TABLE table_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table_change_string_group_double +POSTHOOK: query: CREATE TABLE table_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table_change_string_group_double +PREHOOK: query: insert into table table_change_string_group_double SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table_change_string_group_double +POSTHOOK: query: insert into table table_change_string_group_double SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_change_string_group_double +POSTHOOK: Lineage: table_change_string_group_double.b SIMPLE [] +POSTHOOK: Lineage: table_change_string_group_double.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_double.c2 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_double.c3 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_double.insert_num SIMPLE 
[(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +_col0 _col1 _col2 _col3 _col4 +PREHOOK: query: alter table table_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@table_change_string_group_double +PREHOOK: Output: default@table_change_string_group_double +POSTHOOK: query: alter table table_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@table_change_string_group_double +POSTHOOK: Output: default@table_change_string_group_double +PREHOOK: query: insert into table table_change_string_group_double VALUES (111, 789.321, 789.321, 789.321, 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@table_change_string_group_double +POSTHOOK: query: insert into table table_change_string_group_double VALUES (111, 789.321, 789.321, 789.321, 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@table_change_string_group_double +POSTHOOK: Lineage: table_change_string_group_double.b SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_double.c1 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_double.c2 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_double.c3 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_double.insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 +PREHOOK: query: explain vectorization detail +select insert_num,c1,c2,c3,b from table_change_string_group_double +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,c1,c2,c3,b from table_change_string_group_double +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: table_change_string_group_double + Statistics: Num rows: 6 Data size: 720 Basic stats: COMPLETE Column stats: PARTIAL + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:c1:double, 2:c2:double, 3:c3:double, 4:b:string, 5:ROW__ID:struct] + Select Operator + expressions: insert_num (type: int), c1 (type: double), c2 (type: double), c3 (type: double), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 3, 4] + Statistics: Num rows: 6 Data size: 720 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 6 Data size: 720 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: 
[org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 5 + includeColumns: [0, 1, 2, 3, 4] + dataColumns: insert_num:int, c1:double, c2:double, c3:double, b:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_double +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_string_group_double +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_double +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_string_group_double +#### A masked pattern was here #### +insert_num c1 c2 c3 b +101 1.7976931348623157E308 1.7976931348623157E308 1.7976931348623157E308 original +102 -1.7976931348623157E308 -1.7976931348623157E308 -1.7976931348623157E308 original +103 NULL NULL NULL original +104 30.774 30.774 30.774 original +105 46114.28 46114.28 46114.28 original +111 789.321 789.321 789.321 new +PREHOOK: query: drop table table_change_string_group_double +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table_change_string_group_double +PREHOOK: Output: default@table_change_string_group_double +POSTHOOK: query: drop table table_change_string_group_double +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table_change_string_group_double +POSTHOOK: Output: default@table_change_string_group_double +PREHOOK: query: CREATE TABLE table_change_date_group_string_group_date_group(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table_change_date_group_string_group_date_group +POSTHOOK: query: CREATE TABLE table_change_date_group_string_group_date_group(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table_change_date_group_string_group_date_group +PREHOOK: query: insert into table table_change_date_group_string_group_date_group SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table_change_date_group_string_group_date_group +POSTHOOK: query: insert into table table_change_date_group_string_group_date_group SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_change_date_group_string_group_date_group +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.b SIMPLE [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, 
type:timestamp, comment:null), ] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num date1 date1 date1 date1 date1 timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 _c11 +PREHOOK: query: alter table table_change_date_group_string_group_date_group replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@table_change_date_group_string_group_date_group +PREHOOK: Output: default@table_change_date_group_string_group_date_group +POSTHOOK: query: alter table table_change_date_group_string_group_date_group replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@table_change_date_group_string_group_date_group +POSTHOOK: Output: default@table_change_date_group_string_group_date_group +PREHOOK: query: insert into table table_change_date_group_string_group_date_group VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@table_change_date_group_string_group_date_group +POSTHOOK: query: insert into table table_change_date_group_string_group_date_group VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@table_change_date_group_string_group_date_group +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.b SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c1 SCRIPT [] +POSTHOOK: Lineage: 
table_change_date_group_string_group_date_group.c10 SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c2 SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c3 SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c4 SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c5 SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c6 SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c7 SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c8 SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c9 SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 +PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_date_group_string_group_date_group +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_date_group_string_group_date_group +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_date_group_string_group_date_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_date_group_string_group_date_group +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b +101 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.970117179 6229-06-28 02:5 6229-06-28 02:54:28.970117179 6229-06-28 02:5 original +102 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:3 5966-07-09 03:30:50.597 5966-07-09 03:3 original +103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-02 06:34:14.0 1978-08-02 06:34:14.0 1978-08-02 06:3 1978-08-02 06:34:14.0 1978-08-02 06:3 original +105 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036854 1991-01-06 16:2 1991-01-06 16:20:39.72036854 1991-01-06 16:2 original +111 filler filler filler filler filler filler filler filler filler filler new +PREHOOK: query: drop table table_change_date_group_string_group_date_group +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table_change_date_group_string_group_date_group +PREHOOK: Output: default@table_change_date_group_string_group_date_group +POSTHOOK: query: drop table table_change_date_group_string_group_date_group +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table_change_date_group_string_group_date_group +POSTHOOK: Output: default@table_change_date_group_string_group_date_group +PREHOOK: query: CREATE TABLE table_change_numeric_group_string_group_multi_ints_string_group(insert_num int, + c1 tinyint, c2 smallint, c3 int, c4 bigint, + c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint, + c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint, + b STRING) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: query: CREATE TABLE table_change_numeric_group_string_group_multi_ints_string_group(insert_num int, + c1 tinyint, c2 smallint, c3 int, c4 bigint, + c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint, 
+ c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint, + b STRING) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: query: insert into table table_change_numeric_group_string_group_multi_ints_string_group SELECT insert_num, + tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: query: insert into table table_change_numeric_group_string_group_multi_ints_string_group SELECT insert_num, + tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.b SIMPLE [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c16 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c17 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c18 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c19 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: 
Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c20 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 _c21 +PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 b +101 -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL original +102 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 original +103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 203332 888888857923222 original +105 -99 -28300 -999992 
-222282153733 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 original +PREHOOK: query: alter table table_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, c4 STRING, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5), + c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5), + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: query: alter table table_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, c4 STRING, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5), + c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5), + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: query: insert into table table_change_numeric_group_string_group_multi_ints_string_group VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: query: insert into table table_change_numeric_group_string_group_multi_ints_string_group VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.b SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c1 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c10 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c11 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c12 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c13 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c14 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c15 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c16 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c17 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c18 SCRIPT [] +POSTHOOK: Lineage: 
table_change_numeric_group_string_group_multi_ints_string_group.c19 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c2 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c20 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c3 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c4 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c5 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c6 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c7 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c8 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c9 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 _col17 _col18 _col19 _col20 _col21 +PREHOOK: query: explain vectorization detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: table_change_numeric_group_string_group_multi_ints_string_group + Statistics: Num rows: 6 Data size: 15696 Basic stats: COMPLETE Column stats: PARTIAL + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:c1:string, 2:c2:string, 3:c3:string, 4:c4:string, 5:c5:char(50), 6:c6:char(50), 7:c7:char(50), 8:c8:char(50), 9:c9:char(5), 10:c10:char(5), 11:c11:char(5), 12:c12:char(5), 13:c13:varchar(50), 14:c14:varchar(50), 15:c15:varchar(50), 16:c16:varchar(50), 17:c17:varchar(5), 18:c18:varchar(5), 19:c19:varchar(5), 20:c20:varchar(5), 21:b:string, 22:ROW__ID:struct] + Select Operator + expressions: insert_num (type: int), c1 (type: string), c2 (type: string), c3 (type: string), c4 (type: string), c5 (type: char(50)), c6 (type: char(50)), c7 (type: char(50)), c8 (type: char(50)), c9 (type: char(5)), c10 (type: char(5)), c11 (type: char(5)), c12 (type: char(5)), c13 (type: varchar(50)), c14 (type: varchar(50)), c15 (type: varchar(50)), c16 (type: varchar(50)), c17 (type: varchar(5)), c18 (type: varchar(5)), c19 (type: varchar(5)), c20 (type: varchar(5)), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21] + Statistics: Num rows: 6 Data size: 15696 Basic stats: COMPLETE 
Column stats: PARTIAL + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 6 Data size: 15696 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 22 + includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21] + dataColumns: insert_num:int, c1:string, c2:string, c3:string, c4:string, c5:char(50), c6:char(50), c7:char(50), c8:char(50), c9:char(5), c10:char(5), c11:char(5), c12:char(5), c13:varchar(50), c14:varchar(50), c15:varchar(50), c16:varchar(50), c17:varchar(5), c18:varchar(5), c19:varchar(5), c20:varchar(5), b:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 b +101 -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147 NULL -128 NULL -2147483648 NULL -128 NULL -2147 NULL original +102 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 21474 92233 127 32767 2147483647 9223372036854775807 127 32767 21474 92233 original +103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 20333 88888 23 834 203332 888888857923222 23 834 20333 88888 original +105 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -2830 -9999 -2222 -99 -28300 -999992 -222282153733 -99 -2830 -9999 -2222 original +111 filler filler filler filler filler filler filler filler fille fille fille fille filler filler filler filler fille fille fille fille new +PREHOOK: query: drop table table_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: query: drop table table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: 
default@table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: query: CREATE TABLE table_change_numeric_group_string_group_floating_string_group(insert_num int, + c1 decimal(38,18), c2 float, c3 double, + c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double, + c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double, + b STRING) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +POSTHOOK: query: CREATE TABLE table_change_numeric_group_string_group_floating_string_group(insert_num int, + c1 decimal(38,18), c2 float, c3 double, + c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double, + c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double, + b STRING) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +PREHOOK: query: insert into table table_change_numeric_group_string_group_floating_string_group SELECT insert_num, + decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +POSTHOOK: query: insert into table table_change_numeric_group_string_group_floating_string_group SELECT insert_num, + decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.b SIMPLE [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: 
table_change_numeric_group_string_group_floating_string_group.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num decimal1 float1 double1 decimal1 float1 double1 decimal1 float1 double1 decimal1 float1 double1 decimal1 float1 double1 _c16 +PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b +101 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 original +102 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 original +103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 
66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 original +105 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 original +PREHOOK: query: alter table table_change_numeric_group_string_group_floating_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, + c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7), + c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7), + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group +PREHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +POSTHOOK: query: alter table table_change_numeric_group_string_group_floating_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, + c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7), + c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7), + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group +POSTHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +PREHOOK: query: insert into table table_change_numeric_group_string_group_floating_string_group VALUES (111, + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +POSTHOOK: query: insert into table table_change_numeric_group_string_group_floating_string_group VALUES (111, + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.b SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c1 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c10 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c11 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c12 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c13 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c14 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c15 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c2 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c3 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c4 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c5 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c6 SCRIPT [] 
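The golden output around this point follows the fixed shape every test in this family uses: create an ORC table, populate it from schema_evolution_data, retype columns with ALTER TABLE ... REPLACE COLUMNS, append one post-evolution row, then verify via EXPLAIN VECTORIZATION DETAIL that the scan still reports "Execution mode: vectorized, llap" and "LLAP IO: all inputs", and via SELECT that pre-evolution rows come back converted to the new types. A condensed, self-contained sketch of that shape follows; the table name demo_evolve and the SET lines are illustrative assumptions, not lifted from the .q file:

-- Allow metastore type changes that are incompatible at the file level,
-- as the schema_evol suite does; old ORC files keep their original types.
set hive.metastore.disallow.incompatible.col.type.changes=false;
set hive.vectorized.execution.enabled=true;
set hive.llap.io.enabled=true;

CREATE TABLE demo_evolve (insert_num int, c1 double, b string) STORED AS ORC;
INSERT INTO demo_evolve VALUES (101, 30.774, 'original');

-- Metadata-only retype: the existing ORC file still holds doubles on disk.
ALTER TABLE demo_evolve REPLACE COLUMNS (insert_num int, c1 string, b string);
INSERT INTO demo_evolve VALUES (111, 'filler', 'new');

-- The vectorized LLAP read path converts the stored doubles to strings at
-- scan time, so row 101 returns c1 = '30.774' next to post-evolution row 111.
SELECT insert_num, c1, b FROM demo_evolve;

What the CHAR(7)/VARCHAR(7) columns in the surrounding results add on top of this sketch is truncation: converted values such as -100.35978 are clipped to the declared length (-100.35 for CHAR(7)), which is why the narrow columns show cut-off numbers while the 50-wide ones do not.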
+POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c7 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c8 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c9 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 +PREHOOK: query: explain vectorization detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: table_change_numeric_group_string_group_floating_string_group + Statistics: Num rows: 6 Data size: 10884 Basic stats: COMPLETE Column stats: PARTIAL + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:insert_num:int, 1:c1:string, 2:c2:string, 3:c3:string, 4:c4:char(50), 5:c5:char(50), 6:c6:char(50), 7:c7:char(7), 8:c8:char(7), 9:c9:char(7), 10:c10:varchar(50), 11:c11:varchar(50), 12:c12:varchar(50), 13:c13:varchar(7), 14:c14:varchar(7), 15:c15:varchar(7), 16:b:string, 17:ROW__ID:struct] + Select Operator + expressions: insert_num (type: int), c1 (type: string), c2 (type: string), c3 (type: string), c4 (type: char(50)), c5 (type: char(50)), c6 (type: char(50)), c7 (type: char(7)), c8 (type: char(7)), c9 (type: char(7)), c10 (type: varchar(50)), c11 (type: varchar(50)), c12 (type: varchar(50)), c13 (type: varchar(7)), c14 (type: varchar(7)), c15 (type: varchar(7)), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] + Statistics: Num rows: 6 Data size: 10884 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 6 Data size: 10884 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] + allNative: false + usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] + vectorized: true + rowBatchContext: + dataColumnCount: 17 + includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 
13, 14, 15, 16] + dataColumns: insert_num:int, c1:string, c2:string, c3:string, c4:char(50), c5:char(50), c6:char(50), c7:char(7), c8:char(7), c9:char(7), c10:varchar(50), c11:varchar(50), c12:varchar(50), c13:varchar(7), c14:varchar(7), c15:varchar(7), b:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b +101 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 original +102 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 original +103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 66475.561431 -100.35978 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 original +105 9250340.75 NULL 46114.28 9250340.75 NULL 46114.28 9250340 NULL 46114.2 9250340.75 NULL 46114.28 9250340 NULL 46114.2 original +111 filler filler filler filler filler filler filler filler filler filler filler filler filler filler filler new +PREHOOK: query: drop table table_change_numeric_group_string_group_floating_string_group +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group +PREHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +POSTHOOK: query: drop table table_change_numeric_group_string_group_floating_string_group +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group +POSTHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +PREHOOK: query: CREATE TABLE table_change_string_group_string_group_string(insert_num int, + c1 string, c2 string, c3 string, c4 string, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), + c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table_change_string_group_string_group_string +POSTHOOK: query: CREATE TABLE table_change_string_group_string_group_string(insert_num int, + c1 string, c2 string, c3 string, c4 string, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), + c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table_change_string_group_string_group_string +PREHOOK: query: insert into table table_change_string_group_string_group_string SELECT 
insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table_change_string_group_string_group_string +POSTHOOK: query: insert into table table_change_string_group_string_group_string SELECT insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_change_string_group_string_group_string +POSTHOOK: Lineage: table_change_string_group_string_group_string.b SIMPLE [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c10 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c5 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c6 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c7 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c8 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c9 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 +PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_string_group_string_group_string +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_string_group_string_group_string +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_string_group_string_group_string +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_string_group_string_group_string +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 b +101 freckled freckled freckled freckled original +102 ox ox ox ox original +103 original +104 I cooked I cooked I cooked I cooked original +105 200 200 200 200 original +PREHOOK: query: alter table table_change_string_group_string_group_string 
replace columns (insert_num int, + c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9), + c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING, + c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@table_change_string_group_string_group_string +PREHOOK: Output: default@table_change_string_group_string_group_string +POSTHOOK: query: alter table table_change_string_group_string_group_string replace columns (insert_num int, + c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9), + c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING, + c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@table_change_string_group_string_group_string +POSTHOOK: Output: default@table_change_string_group_string_group_string +PREHOOK: query: insert into table table_change_string_group_string_group_string VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@table_change_string_group_string_group_string +POSTHOOK: query: insert into table table_change_string_group_string_group_string VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@table_change_string_group_string_group_string +POSTHOOK: Lineage: table_change_string_group_string_group_string.b SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c1 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c10 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c2 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c3 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c4 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c5 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c6 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c7 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c8 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c9 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 +PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_string_group_string_group_string +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_string_group_string_group_string +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_string_group_string_group_string +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_string_group_string_group_string +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b +101 freckled freckled freckled freckled freckled freckled freckled freckled freckled freckled original +102 ox ox ox ox ox ox ox ox ox ox original +103 original +104 I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked original +105 200 200 200 200 200 200 200 200 200 200 original +111 filler filler filler filler filler filler filler filler filler filler new +PREHOOK: query: drop table 
table_change_string_group_string_group_string +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table_change_string_group_string_group_string +PREHOOK: Output: default@table_change_string_group_string_group_string +POSTHOOK: query: drop table table_change_string_group_string_group_string +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table_change_string_group_string_group_string +POSTHOOK: Output: default@table_change_string_group_string_group_string +PREHOOK: query: CREATE TABLE table_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int, + c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, + c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint, + c12 int, c13 int, c14 int, c15 int, + c16 bigint, c17 bigint, c18 bigint, + b STRING) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: CREATE TABLE table_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int, + c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, + c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint, + c12 int, c13 int, c14 int, c15 int, + c16 bigint, c17 bigint, c18 bigint, + b STRING) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: query: insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.b SIMPLE [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: 
table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c16 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c17 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c18 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 smallint1 smallint1 smallint1 smallint1 smallint1 int1 int1 int1 int1 bigint1 bigint1 bigint1 _c19 +PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 
c15 c16 c17 c18 b +101 -128 -128 -128 -128 -128 -128 NULL NULL NULL NULL NULL -2147483648 -2147483648 -2147483648 -2147483648 NULL NULL NULL original +102 127 127 127 127 127 127 32767 32767 32767 32767 32767 2147483647 2147483647 2147483647 2147483647 9223372036854775807 9223372036854775807 9223372036854775807 original +103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 23 23 23 23 23 23 834 834 834 834 834 203332 203332 203332 203332 888888857923222 888888857923222 888888857923222 original +105 -99 -99 -99 -99 -99 -99 -28300 -28300 -28300 -28300 -28300 -999992 -999992 -999992 -999992 -222282153733 -222282153733 -222282153733 original +PREHOOK: query: alter table table_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: alter table table_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: query: insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.b SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c1 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c10 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c11 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c12 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c13 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c14 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c15 SCRIPT [] +POSTHOOK: 
Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c16 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c17 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c18 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c2 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c3 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c4 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c5 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c6 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c7 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c8 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c9 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 _col17 _col18 _col19 +PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 b +101 -128 -128 -128 -128.000000000000000000 -128.0 -128.0 NULL NULL NULL NULL NULL -2147483648 -2147483648.000000000000000000 -2.14748365E9 -2.147483648E9 NULL NULL NULL original +102 127 127 127 127.000000000000000000 127.0 127.0 32767 32767 32767.000000000000000000 32767.0 32767.0 2147483647 2147483647.000000000000000000 2.14748365E9 2.147483647E9 9223372036854775807.000000000000000000 9.223372E18 9.223372036854776E18 original +103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 23 23 23 23.000000000000000000 23.0 23.0 834 834 834.000000000000000000 834.0 834.0 203332 203332.000000000000000000 203332.0 203332.0 888888857923222.000000000000000000 8.8888885E14 8.88888857923222E14 original +105 -99 -99 -99 -99.000000000000000000 -99.0 -99.0 -28300 -28300 -28300.000000000000000000 -28300.0 -28300.0 -999992 -999992.000000000000000000 -999992.0 -999992.0 -222282153733.000000000000000000 -2.22282154E11 -2.22282153733E11 original +111 7000 80000 90000000 1234.567800000000000000 9876.543 789.321 80000 90000000 1234.567800000000000000 9876.543 789.321 90000000 1234.567800000000000000 9876.543 789.321 1234.567800000000000000 9876.543 789.321 new +PREHOOK: query: drop table table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: drop table table_change_lower_to_higher_numeric_group_tinyint_to_bigint 
+POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: query: CREATE TABLE table_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: CREATE TABLE table_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: query: insert into table table_change_lower_to_higher_numeric_group_decimal_to_float SELECT insert_num, + decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: insert into table table_change_lower_to_higher_numeric_group_decimal_to_float SELECT insert_num, + decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.b SIMPLE [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num decimal1 decimal1 float1 _c4 +PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +#### A masked pattern was here #### +insert_num c1 c2 c3 b +101 99999999999999999999.999999999999999999 99999999999999999999.999999999999999999 Infinity original +102 -99999999999999999999.999999999999999999 -99999999999999999999.999999999999999999 -Infinity original +103 NULL NULL NULL original +104 66475.561431000000000000 66475.561431000000000000 -100.35978 original +105 9250340.750000000000000000 9250340.750000000000000000 NULL original +PREHOOK: query: alter table table_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) +PREHOOK: 
type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: alter table table_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: query: insert into table table_change_lower_to_higher_numeric_group_decimal_to_float VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: insert into table table_change_lower_to_higher_numeric_group_decimal_to_float VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.b SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c1 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c2 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c3 SCRIPT [] +POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 +PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +#### A masked pattern was here #### +insert_num c1 c2 c3 b +101 1.0E20 1.0E20 Infinity original +102 -1.0E20 -1.0E20 -Infinity original +103 NULL NULL NULL original +104 66475.56 66475.561431 -100.35978 original +105 9250341.0 9250340.75 NULL original +111 1234.5677 9876.543 1234.5678 new +PREHOOK: query: drop table table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: drop table table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float
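
The hunk above pins down Hive's "lower to higher" numeric-group evolution: after ALTER TABLE ... REPLACE COLUMNS retypes decimal(38,18) as float, rows already on disk are converted on read, so the float column picks up float's reduced precision (the inserted 1234.5678 reads back as 1234.5677 in c1, while the double column keeps 9876.543). A minimal standalone sketch of the same behavior, assuming a session with hive.metastore.disallow.incompatible.col.type.changes=false as these schema_evol tests set it; the table name demo_dec_to_float is illustrative, not part of the patch:

-- Write a decimal value, then move the column to the float numeric group.
CREATE TABLE demo_dec_to_float (id int, v decimal(38,18)) STORED AS TEXTFILE;
INSERT INTO demo_dec_to_float VALUES (1, 1234.5678);
-- Metadata-only change; the data file on disk is untouched.
ALTER TABLE demo_dec_to_float REPLACE COLUMNS (id int, v float);
-- The reader converts on the fly: expect 1234.5677, the nearest float
-- to 1234.5678, matching row 111 in the golden output above.
SELECT id, v FROM demo_dec_to_float;
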
diff --git ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_all_complex_llap_io.q.out ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_all_complex_llap_io.q.out new file mode 100644 index 0000000..c0289ad --- /dev/null +++ ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_all_complex_llap_io.q.out @@ -0,0 +1,687 @@ +PREHOOK: query: CREATE TABLE part_change_various_various_struct1(insert_num int, s1 STRUCT<c1:BOOLEAN, c2:TINYINT, c3:SMALLINT, c4:INT, c5:BIGINT, c6:FLOAT, c7:DOUBLE, c8:DECIMAL(38,18), c9:CHAR(25), c10:VARCHAR(25), c11:TIMESTAMP, c12:DATE, c13:BINARY>, b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_various_various_struct1 +POSTHOOK: query: CREATE TABLE part_change_various_various_struct1(insert_num int, s1 STRUCT<c1:BOOLEAN, c2:TINYINT, c3:SMALLINT, c4:INT, c5:BIGINT, c6:FLOAT, c7:DOUBLE, c8:DECIMAL(38,18), c9:CHAR(25), c10:VARCHAR(25), c11:TIMESTAMP, c12:DATE, c13:BINARY>, b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_various_various_struct1 +PREHOOK: query: CREATE TABLE complex_struct1_a_txt(insert_num int, s1 STRUCT<c1:BOOLEAN, c2:TINYINT, c3:SMALLINT, c4:INT, c5:BIGINT, c6:FLOAT, c7:DOUBLE, c8:DECIMAL(38,18), c9:CHAR(25), c10:VARCHAR(25), c11:TIMESTAMP, c12:DATE, c13:BINARY>, b STRING) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@complex_struct1_a_txt +POSTHOOK: query: CREATE TABLE complex_struct1_a_txt(insert_num int, s1 STRUCT<c1:BOOLEAN, c2:TINYINT, c3:SMALLINT, c4:INT, c5:BIGINT, c6:FLOAT, c7:DOUBLE, c8:DECIMAL(38,18), c9:CHAR(25), c10:VARCHAR(25), c11:TIMESTAMP, c12:DATE, c13:BINARY>, b STRING) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@complex_struct1_a_txt +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct1_a.txt' overwrite into table complex_struct1_a_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@complex_struct1_a_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct1_a.txt' overwrite into table complex_struct1_a_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@complex_struct1_a_txt +PREHOOK: query: insert into table part_change_various_various_struct1 partition(part=1) select * from complex_struct1_a_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@complex_struct1_a_txt +PREHOOK: Output: default@part_change_various_various_struct1@part=1 +POSTHOOK: query: insert into table part_change_various_various_struct1 partition(part=1) select * from complex_struct1_a_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@complex_struct1_a_txt +POSTHOOK: Output: default@part_change_various_various_struct1@part=1 +POSTHOOK: Lineage: part_change_various_various_struct1 PARTITION(part=1).b SIMPLE [(complex_struct1_a_txt)complex_struct1_a_txt.FieldSchema(name:b, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_struct1 PARTITION(part=1).insert_num SIMPLE [(complex_struct1_a_txt)complex_struct1_a_txt.FieldSchema(name:insert_num, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_struct1 PARTITION(part=1).s1 SIMPLE [(complex_struct1_a_txt)complex_struct1_a_txt.FieldSchema(name:s1, type:struct<c1:boolean,c2:tinyint,c3:smallint,c4:int,c5:bigint,c6:float,c7:double,c8:decimal(38,18),c9:char(25),c10:varchar(25),c11:timestamp,c12:date,c13:binary>, comment:null), ] +complex_struct1_a_txt.insert_num complex_struct1_a_txt.s1 complex_struct1_a_txt.b +PREHOOK: query: select insert_num,part,s1,b from part_change_various_various_struct1 +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_various_various_struct1 +PREHOOK: Input: default@part_change_various_various_struct1@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,s1,b from part_change_various_various_struct1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_various_various_struct1 +POSTHOOK: Input: default@part_change_various_various_struct1@part=1 +#### A masked pattern was here #### +insert_num part s1 b +1 1 {"c1":true,"c2":null,"c3":null,"c4":3244222,"c5":-99999999999,"c6":-29.0764,"c7":4.70614135E8,"c8":470614135,"c9":"dynamic reptile ","c10":"dynamic reptile ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":n)گ} original
{"c1":null,"c2":100,"c3":null,"c4":14,"c5":-23866739993,"c6":-3651.672,"c7":46114.284799488,"c8":46114.284799488,"c9":" baffling ","c10":" baffling ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":n)گ} original +3 1 {"c1":false,"c2":72,"c3":null,"c4":-93222,"c5":30,"c6":-66475.56,"c7":-66475.561431,"c8":0.561431,"c9":"1 ","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":n)گ} original +4 1 {"c1":null,"c2":-90,"c3":null,"c4":3289094,"c5":46114,"c6":9250341.0,"c7":9250340.75,"c8":9250340.75,"c9":"junkyard ","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":n)گ} original +PREHOOK: query: alter table part_change_various_various_struct1 replace columns (insert_num int, s1 STRUCT, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_various_various_struct1 +PREHOOK: Output: default@part_change_various_various_struct1 +POSTHOOK: query: alter table part_change_various_various_struct1 replace columns (insert_num int, s1 STRUCT, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_various_various_struct1 +POSTHOOK: Output: default@part_change_various_various_struct1 +PREHOOK: query: CREATE TABLE complex_struct1_b_txt(insert_num int, s1 STRUCT, b STRING) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@complex_struct1_b_txt +POSTHOOK: query: CREATE TABLE complex_struct1_b_txt(insert_num int, s1 STRUCT, b STRING) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@complex_struct1_b_txt +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct1_b.txt' overwrite into table complex_struct1_b_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@complex_struct1_b_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct1_b.txt' overwrite into table complex_struct1_b_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@complex_struct1_b_txt +PREHOOK: query: insert into table part_change_various_various_struct1 partition(part=2) select * from complex_struct1_b_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@complex_struct1_b_txt +PREHOOK: Output: default@part_change_various_various_struct1@part=2 +POSTHOOK: query: insert into table part_change_various_various_struct1 partition(part=2) select * from complex_struct1_b_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@complex_struct1_b_txt +POSTHOOK: Output: default@part_change_various_various_struct1@part=2 +POSTHOOK: Lineage: part_change_various_various_struct1 PARTITION(part=2).b SIMPLE [(complex_struct1_b_txt)complex_struct1_b_txt.FieldSchema(name:b, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_struct1 PARTITION(part=2).insert_num SIMPLE [(complex_struct1_b_txt)complex_struct1_b_txt.FieldSchema(name:insert_num, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_struct1 PARTITION(part=2).s1 SIMPLE [(complex_struct1_b_txt)complex_struct1_b_txt.FieldSchema(name:s1, type:struct, comment:null), ] +complex_struct1_b_txt.insert_num complex_struct1_b_txt.s1 complex_struct1_b_txt.b +PREHOOK: query: 
CREATE TABLE complex_struct1_c_txt(insert_num int, s1 STRUCT<c1:STRING, c2:STRING, c3:STRING, c4:STRING, c5:STRING, c6:STRING, c7:STRING, c8:STRING, c9:STRING, c10:STRING, c11:STRING, c12:STRING, c13:STRING>, b STRING) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@complex_struct1_c_txt +POSTHOOK: query: CREATE TABLE complex_struct1_c_txt(insert_num int, s1 STRUCT<c1:STRING, c2:STRING, c3:STRING, c4:STRING, c5:STRING, c6:STRING, c7:STRING, c8:STRING, c9:STRING, c10:STRING, c11:STRING, c12:STRING, c13:STRING>, b STRING) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@complex_struct1_c_txt +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct1_c.txt' overwrite into table complex_struct1_c_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@complex_struct1_c_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct1_c.txt' overwrite into table complex_struct1_c_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@complex_struct1_c_txt +PREHOOK: query: insert into table part_change_various_various_struct1 partition(part=1) select * from complex_struct1_c_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@complex_struct1_c_txt +PREHOOK: Output: default@part_change_various_various_struct1@part=1 +POSTHOOK: query: insert into table part_change_various_various_struct1 partition(part=1) select * from complex_struct1_c_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@complex_struct1_c_txt +POSTHOOK: Output: default@part_change_various_various_struct1@part=1 +POSTHOOK: Lineage: part_change_various_various_struct1 PARTITION(part=1).b SIMPLE [(complex_struct1_c_txt)complex_struct1_c_txt.FieldSchema(name:b, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_struct1 PARTITION(part=1).insert_num SIMPLE [(complex_struct1_c_txt)complex_struct1_c_txt.FieldSchema(name:insert_num, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_struct1 PARTITION(part=1).s1 SIMPLE [(complex_struct1_c_txt)complex_struct1_c_txt.FieldSchema(name:s1, type:struct<c1:string,c2:string,c3:string,c4:string,c5:string,c6:string,c7:string,c8:string,c9:string,c10:string,c11:string,c12:string,c13:string>, comment:null), ] +complex_struct1_c_txt.insert_num complex_struct1_c_txt.s1 complex_struct1_c_txt.b +PREHOOK: query: explain vectorization detail +select insert_num,part,s1,b from part_change_various_various_struct1 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,s1,b from part_change_various_various_struct1 +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_various_various_struct1 + Statistics: Num rows: 6 Data size: 17227 Basic stats: COMPLETE Column stats: PARTIAL + Select Operator + expressions: insert_num (type: int), part (type: int), s1 (type: struct<c1:string,c2:string,c3:string,c4:string,c5:string,c6:string,c7:string,c8:string,c9:string,c10:string,c11:string,c12:string,c13:string>), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 6 Data size: 16320 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 16320 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: no inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,s1,b from part_change_various_various_struct1 +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_various_various_struct1 +PREHOOK: Input: default@part_change_various_various_struct1@part=1 +PREHOOK: Input: default@part_change_various_various_struct1@part=2 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,s1,b from part_change_various_various_struct1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_various_various_struct1 +POSTHOOK: Input: default@part_change_various_various_struct1@part=1 +POSTHOOK: Input: default@part_change_various_various_struct1@part=2 +#### A masked pattern was here #### +insert_num part s1 b +1 1 {"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":"n)گ"} original +2 1 {"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":" baffling","c10":" baffling ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":"n)گ"} original +3 1 {"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"n)گ"} original +4 1 {"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"n)گ"} original +5 2 {"c1":"true","c2":"400","c3":"44388","c4":"-100","c5":"953967041.","c6":"62.079153","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":"timestamp","c12":"date","c13":"binary"} new +6 1 {"c1":"FALSE","c2":"-67","c3":"833","c4":"63993","c5":"1255178165","c6":"905071.0","c7":"-4314.7918","c8":"-1240033819","c9":"trial","c10":"trial","c11":null,"c12":"2016-03-07","c13":"n)گ"} new +PREHOOK: query: drop table part_change_various_various_struct1 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_various_various_struct1 +PREHOOK: Output: default@part_change_various_various_struct1 +POSTHOOK: query: drop table part_change_various_various_struct1 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_various_various_struct1 +POSTHOOK: Output: default@part_change_various_various_struct1 +PREHOOK: query: CREATE TABLE part_add_various_various_struct2(insert_num int, b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_add_various_various_struct2 +POSTHOOK: query: CREATE TABLE part_add_various_various_struct2(insert_num int, b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_add_various_various_struct2 +PREHOOK: query: insert into table part_add_various_various_struct2 partition(part=1) + values(1, 'original'), + (2, 'original') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_add_various_various_struct2@part=1 +POSTHOOK: query: insert into table part_add_various_various_struct2 partition(part=1) + 
values(1, 'original'), + (2, 'original') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_add_various_various_struct2@part=1 +POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=1).insert_num SCRIPT [] +col1 col2 +PREHOOK: query: select insert_num,part,b from part_add_various_various_struct2 +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_various_various_struct2 +PREHOOK: Input: default@part_add_various_various_struct2@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,b from part_add_various_various_struct2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_various_various_struct2 +POSTHOOK: Input: default@part_add_various_various_struct2@part=1 +#### A masked pattern was here #### +insert_num part b +1 1 original +2 1 original +PREHOOK: query: alter table part_add_various_various_struct2 ADD columns (s2 STRUCT<c1:BOOLEAN, c2:TINYINT, c3:SMALLINT, c4:INT, c5:BIGINT, c6:FLOAT, c7:DOUBLE, c8:DECIMAL(38,18), c9:CHAR(25), c10:VARCHAR(25), c11:TIMESTAMP, c12:DATE, c13:BINARY>) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@part_add_various_various_struct2 +PREHOOK: Output: default@part_add_various_various_struct2 +POSTHOOK: query: alter table part_add_various_various_struct2 ADD columns (s2 STRUCT<c1:BOOLEAN, c2:TINYINT, c3:SMALLINT, c4:INT, c5:BIGINT, c6:FLOAT, c7:DOUBLE, c8:DECIMAL(38,18), c9:CHAR(25), c10:VARCHAR(25), c11:TIMESTAMP, c12:DATE, c13:BINARY>) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@part_add_various_various_struct2 +POSTHOOK: Output: default@part_add_various_various_struct2 +PREHOOK: query: CREATE TABLE complex_struct2_a_txt(insert_num int, b STRING, s2 STRUCT<c1:BOOLEAN, c2:TINYINT, c3:SMALLINT, c4:INT, c5:BIGINT, c6:FLOAT, c7:DOUBLE, c8:DECIMAL(38,18), c9:CHAR(25), c10:VARCHAR(25), c11:TIMESTAMP, c12:DATE, c13:BINARY>) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@complex_struct2_a_txt +POSTHOOK: query: CREATE TABLE complex_struct2_a_txt(insert_num int, b STRING, s2 STRUCT<c1:BOOLEAN, c2:TINYINT, c3:SMALLINT, c4:INT, c5:BIGINT, c6:FLOAT, c7:DOUBLE, c8:DECIMAL(38,18), c9:CHAR(25), c10:VARCHAR(25), c11:TIMESTAMP, c12:DATE, c13:BINARY>) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@complex_struct2_a_txt +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct2_a.txt' overwrite into table complex_struct2_a_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@complex_struct2_a_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct2_a.txt' overwrite into table complex_struct2_a_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@complex_struct2_a_txt +PREHOOK: query: insert into table part_add_various_various_struct2 partition(part=1) select * from complex_struct2_a_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@complex_struct2_a_txt +PREHOOK: Output: default@part_add_various_various_struct2@part=1 +POSTHOOK: query: insert into table part_add_various_various_struct2 partition(part=1) select * from complex_struct2_a_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@complex_struct2_a_txt +POSTHOOK: Output: default@part_add_various_various_struct2@part=1 +POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=1).b SIMPLE [(complex_struct2_a_txt)complex_struct2_a_txt.FieldSchema(name:b, type:string, comment:null), ] +POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=1).insert_num SIMPLE [(complex_struct2_a_txt)complex_struct2_a_txt.FieldSchema(name:insert_num, type:int, comment:null), ] +POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=1).s2 SIMPLE 
[(complex_struct2_a_txt)complex_struct2_a_txt.FieldSchema(name:s2, type:struct<c1:boolean,c2:tinyint,c3:smallint,c4:int,c5:bigint,c6:float,c7:double,c8:decimal(38,18),c9:char(25),c10:varchar(25),c11:timestamp,c12:date,c13:binary>, comment:null), ] +complex_struct2_a_txt.insert_num complex_struct2_a_txt.b complex_struct2_a_txt.s2 +PREHOOK: query: CREATE TABLE complex_struct2_b_txt(insert_num int, b STRING, s2 STRUCT<c1:BOOLEAN, c2:TINYINT, c3:SMALLINT, c4:INT, c5:BIGINT, c6:FLOAT, c7:DOUBLE, c8:DECIMAL(38,18), c9:CHAR(25), c10:VARCHAR(25), c11:TIMESTAMP, c12:DATE, c13:BINARY>) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@complex_struct2_b_txt +POSTHOOK: query: CREATE TABLE complex_struct2_b_txt(insert_num int, b STRING, s2 STRUCT<c1:BOOLEAN, c2:TINYINT, c3:SMALLINT, c4:INT, c5:BIGINT, c6:FLOAT, c7:DOUBLE, c8:DECIMAL(38,18), c9:CHAR(25), c10:VARCHAR(25), c11:TIMESTAMP, c12:DATE, c13:BINARY>) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@complex_struct2_b_txt +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct2_b.txt' overwrite into table complex_struct2_b_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@complex_struct2_b_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct2_b.txt' overwrite into table complex_struct2_b_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@complex_struct2_b_txt +PREHOOK: query: insert into table part_add_various_various_struct2 partition(part=2) select * from complex_struct2_b_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@complex_struct2_b_txt +PREHOOK: Output: default@part_add_various_various_struct2@part=2 +POSTHOOK: query: insert into table part_add_various_various_struct2 partition(part=2) select * from complex_struct2_b_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@complex_struct2_b_txt +POSTHOOK: Output: default@part_add_various_various_struct2@part=2 +POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=2).b SIMPLE [(complex_struct2_b_txt)complex_struct2_b_txt.FieldSchema(name:b, type:string, comment:null), ] +POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=2).insert_num SIMPLE [(complex_struct2_b_txt)complex_struct2_b_txt.FieldSchema(name:insert_num, type:int, comment:null), ] +POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=2).s2 SIMPLE [(complex_struct2_b_txt)complex_struct2_b_txt.FieldSchema(name:s2, type:struct<c1:boolean,c2:tinyint,c3:smallint,c4:int,c5:bigint,c6:float,c7:double,c8:decimal(38,18),c9:char(25),c10:varchar(25),c11:timestamp,c12:date,c13:binary>, comment:null), ] +complex_struct2_b_txt.insert_num complex_struct2_b_txt.b complex_struct2_b_txt.s2 +PREHOOK: query: select insert_num,part,b,s2 from part_add_various_various_struct2 +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_various_various_struct2 +PREHOOK: Input: default@part_add_various_various_struct2@part=1 +PREHOOK: Input: default@part_add_various_various_struct2@part=2 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,b,s2 from part_add_various_various_struct2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_various_various_struct2 +POSTHOOK: Input: default@part_add_various_various_struct2@part=1 +POSTHOOK: Input: default@part_add_various_various_struct2@part=2 +#### A masked pattern was here #### +insert_num part b s2 +1 1 original NULL +2 1 original NULL +3 1 new NULL +4 1 new NULL +5 2 new {"c1":false,"c2":72,"c3":null,"c4":-93222,"c5":30,"c6":-66475.56,"c7":-66475.561431,"c8":0.561431,"c9":"1 ","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":n)گ}
{"c1":null,"c2":-90,"c3":null,"c4":3289094,"c5":46114,"c6":9250341.0,"c7":9250340.75,"c8":9250340.75,"c9":"junkyard ","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":n)گ} +PREHOOK: query: alter table part_add_various_various_struct2 REPLACE columns (insert_num int, b STRING, s2 STRUCT) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_add_various_various_struct2 +PREHOOK: Output: default@part_add_various_various_struct2 +POSTHOOK: query: alter table part_add_various_various_struct2 REPLACE columns (insert_num int, b STRING, s2 STRUCT) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_add_various_various_struct2 +POSTHOOK: Output: default@part_add_various_various_struct2 +PREHOOK: query: CREATE TABLE complex_struct2_c_txt(insert_num int, b STRING, s2 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@complex_struct2_c_txt +POSTHOOK: query: CREATE TABLE complex_struct2_c_txt(insert_num int, b STRING, s2 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@complex_struct2_c_txt +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct2_c.txt' overwrite into table complex_struct2_c_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@complex_struct2_c_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct2_c.txt' overwrite into table complex_struct2_c_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@complex_struct2_c_txt +PREHOOK: query: insert into table part_add_various_various_struct2 partition(part=2) select * from complex_struct2_c_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@complex_struct2_c_txt +PREHOOK: Output: default@part_add_various_various_struct2@part=2 +POSTHOOK: query: insert into table part_add_various_various_struct2 partition(part=2) select * from complex_struct2_c_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@complex_struct2_c_txt +POSTHOOK: Output: default@part_add_various_various_struct2@part=2 +POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=2).b SIMPLE [(complex_struct2_c_txt)complex_struct2_c_txt.FieldSchema(name:b, type:string, comment:null), ] +POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=2).insert_num SIMPLE [(complex_struct2_c_txt)complex_struct2_c_txt.FieldSchema(name:insert_num, type:int, comment:null), ] +POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=2).s2 SIMPLE [(complex_struct2_c_txt)complex_struct2_c_txt.FieldSchema(name:s2, type:struct, comment:null), ] +complex_struct2_c_txt.insert_num complex_struct2_c_txt.b complex_struct2_c_txt.s2 +PREHOOK: query: CREATE TABLE complex_struct2_d_txt(insert_num int, b STRING, s2 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@complex_struct2_d_txt +POSTHOOK: query: CREATE TABLE complex_struct2_d_txt(insert_num int, b STRING, s2 STRUCT) +row format delimited fields terminated by '|' +collection items terminated 
by ',' +map keys terminated by ':' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@complex_struct2_d_txt +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct2_d.txt' overwrite into table complex_struct2_d_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@complex_struct2_d_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct2_d.txt' overwrite into table complex_struct2_d_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@complex_struct2_d_txt +PREHOOK: query: insert into table part_add_various_various_struct2 partition(part=1) select * from complex_struct2_d_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@complex_struct2_d_txt +PREHOOK: Output: default@part_add_various_various_struct2@part=1 +POSTHOOK: query: insert into table part_add_various_various_struct2 partition(part=1) select * from complex_struct2_d_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@complex_struct2_d_txt +POSTHOOK: Output: default@part_add_various_various_struct2@part=1 +POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=1).b SIMPLE [(complex_struct2_d_txt)complex_struct2_d_txt.FieldSchema(name:b, type:string, comment:null), ] +POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=1).insert_num SIMPLE [(complex_struct2_d_txt)complex_struct2_d_txt.FieldSchema(name:insert_num, type:int, comment:null), ] +POSTHOOK: Lineage: part_add_various_various_struct2 PARTITION(part=1).s2 SIMPLE [(complex_struct2_d_txt)complex_struct2_d_txt.FieldSchema(name:s2, type:struct<c1:string,c2:string,c3:string,c4:string,c5:string,c6:string,c7:string,c8:string,c9:string,c10:string,c11:string,c12:string,c13:string>, comment:null), ] +complex_struct2_d_txt.insert_num complex_struct2_d_txt.b complex_struct2_d_txt.s2 +PREHOOK: query: explain vectorization detail +select insert_num,part,b,s2 from part_add_various_various_struct2 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,b,s2 from part_add_various_various_struct2 +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_add_various_various_struct2 + Statistics: Num rows: 8 Data size: 800 Basic stats: COMPLETE Column stats: PARTIAL + Select Operator + expressions: insert_num (type: int), part (type: int), b (type: string), s2 (type: struct<c1:string,c2:string,c3:string,c4:string,c5:string,c6:string,c7:string,c8:string,c9:string,c10:string,c11:string,c12:string,c13:string>) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 8 Data size: 800 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + Statistics: Num rows: 8 Data size: 800 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: no inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,b,s2 from part_add_various_various_struct2 +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_various_various_struct2 +PREHOOK: Input: default@part_add_various_various_struct2@part=1 +PREHOOK: Input: default@part_add_various_various_struct2@part=2 +#### A 
masked pattern was here #### +POSTHOOK: query: select insert_num,part,b,s2 from part_add_various_various_struct2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_various_various_struct2 +POSTHOOK: Input: default@part_add_various_various_struct2@part=1 +POSTHOOK: Input: default@part_add_various_various_struct2@part=2 +#### A masked pattern was here #### +insert_num part b s2 +1 1 original NULL +2 1 original NULL +3 1 new NULL +4 1 new NULL +5 2 new {"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"n)گ"} +6 2 new {"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"n)گ"} +7 2 new {"c1":"TRUE","c2":null,"c3":null,"c4":"-100","c5":"953967041","c6":"62.07915","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":null,"c12":null,"c13":"n)گ"} +8 1 new NULL +PREHOOK: query: drop table part_add_various_various_struct2 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_add_various_various_struct2 +PREHOOK: Output: default@part_add_various_various_struct2 +POSTHOOK: query: drop table part_add_various_various_struct2 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_add_various_various_struct2 +POSTHOOK: Output: default@part_add_various_various_struct2 +PREHOOK: query: CREATE TABLE part_add_to_various_various_struct4(insert_num int, b STRING, s3 STRUCT) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_add_to_various_various_struct4 +POSTHOOK: query: CREATE TABLE part_add_to_various_various_struct4(insert_num int, b STRING, s3 STRUCT) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_add_to_various_various_struct4 +PREHOOK: query: CREATE TABLE complex_struct4_a_txt(insert_num int, b STRING, s3 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@complex_struct4_a_txt +POSTHOOK: query: CREATE TABLE complex_struct4_a_txt(insert_num int, b STRING, s3 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@complex_struct4_a_txt +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct4_a.txt' overwrite into table complex_struct4_a_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@complex_struct4_a_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct4_a.txt' overwrite into table complex_struct4_a_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@complex_struct4_a_txt +PREHOOK: query: insert into table part_add_to_various_various_struct4 partition(part=1) select * from complex_struct4_a_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@complex_struct4_a_txt +PREHOOK: Output: default@part_add_to_various_various_struct4@part=1 +POSTHOOK: query: insert into table part_add_to_various_various_struct4 partition(part=1) select * from complex_struct4_a_txt 
+POSTHOOK: type: QUERY +POSTHOOK: Input: default@complex_struct4_a_txt +POSTHOOK: Output: default@part_add_to_various_various_struct4@part=1 +POSTHOOK: Lineage: part_add_to_various_various_struct4 PARTITION(part=1).b SIMPLE [(complex_struct4_a_txt)complex_struct4_a_txt.FieldSchema(name:b, type:string, comment:null), ] +POSTHOOK: Lineage: part_add_to_various_various_struct4 PARTITION(part=1).insert_num SIMPLE [(complex_struct4_a_txt)complex_struct4_a_txt.FieldSchema(name:insert_num, type:int, comment:null), ] +POSTHOOK: Lineage: part_add_to_various_various_struct4 PARTITION(part=1).s3 SIMPLE [(complex_struct4_a_txt)complex_struct4_a_txt.FieldSchema(name:s3, type:struct, comment:null), ] +complex_struct4_a_txt.insert_num complex_struct4_a_txt.b complex_struct4_a_txt.s3 +PREHOOK: query: select insert_num,part,b,s3 from part_add_to_various_various_struct4 +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_to_various_various_struct4 +PREHOOK: Input: default@part_add_to_various_various_struct4@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,b,s3 from part_add_to_various_various_struct4 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_to_various_various_struct4 +POSTHOOK: Input: default@part_add_to_various_various_struct4@part=1 +#### A masked pattern was here #### +insert_num part b s3 +1 1 original {"c1":true,"c2":null,"c3":null,"c4":3244222,"c5":-99999999999} +2 1 original {"c1":null,"c2":100,"c3":null,"c4":14,"c5":-23866739993} +PREHOOK: query: alter table part_add_to_various_various_struct4 replace columns (insert_num int, b STRING, s3 STRUCT) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_add_to_various_various_struct4 +PREHOOK: Output: default@part_add_to_various_various_struct4 +POSTHOOK: query: alter table part_add_to_various_various_struct4 replace columns (insert_num int, b STRING, s3 STRUCT) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_add_to_various_various_struct4 +POSTHOOK: Output: default@part_add_to_various_various_struct4 +PREHOOK: query: CREATE TABLE complex_struct4_b_txt(insert_num int, b STRING, s3 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@complex_struct4_b_txt +POSTHOOK: query: CREATE TABLE complex_struct4_b_txt(insert_num int, b STRING, s3 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@complex_struct4_b_txt +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct4_b.txt' overwrite into table complex_struct4_b_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@complex_struct4_b_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct4_b.txt' overwrite into table complex_struct4_b_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@complex_struct4_b_txt +PREHOOK: query: insert into table part_add_to_various_various_struct4 partition(part=2) select * from complex_struct4_b_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@complex_struct4_b_txt +PREHOOK: Output: default@part_add_to_various_various_struct4@part=2 +POSTHOOK: query: insert into table 
part_add_to_various_various_struct4 partition(part=2) select * from complex_struct4_b_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@complex_struct4_b_txt +POSTHOOK: Output: default@part_add_to_various_various_struct4@part=2 +POSTHOOK: Lineage: part_add_to_various_various_struct4 PARTITION(part=2).b SIMPLE [(complex_struct4_b_txt)complex_struct4_b_txt.FieldSchema(name:b, type:string, comment:null), ] +POSTHOOK: Lineage: part_add_to_various_various_struct4 PARTITION(part=2).insert_num SIMPLE [(complex_struct4_b_txt)complex_struct4_b_txt.FieldSchema(name:insert_num, type:int, comment:null), ] +POSTHOOK: Lineage: part_add_to_various_various_struct4 PARTITION(part=2).s3 SIMPLE [(complex_struct4_b_txt)complex_struct4_b_txt.FieldSchema(name:s3, type:struct, comment:null), ] +complex_struct4_b_txt.insert_num complex_struct4_b_txt.b complex_struct4_b_txt.s3 +PREHOOK: query: CREATE TABLE complex_struct4_c_txt(insert_num int, b STRING, s3 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@complex_struct4_c_txt +POSTHOOK: query: CREATE TABLE complex_struct4_c_txt(insert_num int, b STRING, s3 STRUCT) +row format delimited fields terminated by '|' +collection items terminated by ',' +map keys terminated by ':' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@complex_struct4_c_txt +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct4_c.txt' overwrite into table complex_struct4_c_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@complex_struct4_c_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/complex_struct4_c.txt' overwrite into table complex_struct4_c_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@complex_struct4_c_txt +PREHOOK: query: insert into table part_add_to_various_various_struct4 partition(part=1) select * from complex_struct4_c_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@complex_struct4_c_txt +PREHOOK: Output: default@part_add_to_various_various_struct4@part=1 +POSTHOOK: query: insert into table part_add_to_various_various_struct4 partition(part=1) select * from complex_struct4_c_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@complex_struct4_c_txt +POSTHOOK: Output: default@part_add_to_various_various_struct4@part=1 +POSTHOOK: Lineage: part_add_to_various_various_struct4 PARTITION(part=1).b SIMPLE [(complex_struct4_c_txt)complex_struct4_c_txt.FieldSchema(name:b, type:string, comment:null), ] +POSTHOOK: Lineage: part_add_to_various_various_struct4 PARTITION(part=1).insert_num SIMPLE [(complex_struct4_c_txt)complex_struct4_c_txt.FieldSchema(name:insert_num, type:int, comment:null), ] +POSTHOOK: Lineage: part_add_to_various_various_struct4 PARTITION(part=1).s3 SIMPLE [(complex_struct4_c_txt)complex_struct4_c_txt.FieldSchema(name:s3, type:struct, comment:null), ] +complex_struct4_c_txt.insert_num complex_struct4_c_txt.b complex_struct4_c_txt.s3 +PREHOOK: query: explain vectorization detail +select insert_num,part,b,s3 from part_add_to_various_various_struct4 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,b,s3 from part_add_to_various_various_struct4 +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: 
[hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_add_to_various_various_struct4 + Statistics: Num rows: 4 Data size: 4073 Basic stats: COMPLETE Column stats: PARTIAL + Select Operator + expressions: insert_num (type: int), part (type: int), b (type: string), s3 (type: struct) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 4 Data size: 3736 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + Statistics: Num rows: 4 Data size: 3736 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: no inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,b,s3 from part_add_to_various_various_struct4 +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_to_various_various_struct4 +PREHOOK: Input: default@part_add_to_various_various_struct4@part=1 +PREHOOK: Input: default@part_add_to_various_various_struct4@part=2 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,b,s3 from part_add_to_various_various_struct4 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_to_various_various_struct4 +POSTHOOK: Input: default@part_add_to_various_various_struct4@part=1 +POSTHOOK: Input: default@part_add_to_various_various_struct4@part=2 +#### A masked pattern was here #### +insert_num part b s3 +1 1 original {"c1":true,"c2":null,"c3":null,"c4":3244222,"c5":-99999999999,"c6":null,"c7":null,"c8":null,"c9":null,"c10":null,"c11":null,"c12":null,"c13":null} +2 1 original {"c1":null,"c2":100,"c3":null,"c4":14,"c5":-23866739993,"c6":null,"c7":null,"c8":null,"c9":null,"c10":null,"c11":null,"c12":null,"c13":null} +3 2 new {"c1":true,"c2":null,"c3":null,"c4":-100,"c5":953967041,"c6":62.07915,"c7":718.78,"c8":1,"c9":"verdict ","c10":"verdict","c11":null,"c12":null,"c13":n)گ} +4 1 new {"c1":false,"c2":-67,"c3":833,"c4":63993,"c5":1255178165,"c6":null,"c7":null,"c8":null,"c9":null,"c10":null,"c11":null,"c12":null,"c13":null} +PREHOOK: query: drop table part_add_to_various_various_struct4 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_add_to_various_various_struct4 +PREHOOK: Output: default@part_add_to_various_various_struct4 +POSTHOOK: query: drop table part_add_to_various_various_struct4 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_add_to_various_various_struct4 +POSTHOOK: Output: default@part_add_to_various_various_struct4 diff --git ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_all_primitive_llap_io.q.out ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_all_primitive_llap_io.q.out new file mode 100644 index 0000000..52bfbdc --- /dev/null +++ ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_all_primitive_llap_io.q.out @@ -0,0 +1,1042 @@ +PREHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str 
string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@schema_evolution_data +POSTHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@schema_evolution_data +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@schema_evolution_data +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@schema_evolution_data +PREHOOK: query: CREATE TABLE schema_evolution_data_2(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@schema_evolution_data_2 +POSTHOOK: query: CREATE TABLE schema_evolution_data_2(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@schema_evolution_data_2 +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data_2.txt' overwrite into table schema_evolution_data_2 +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@schema_evolution_data_2 +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data_2.txt' overwrite into table schema_evolution_data_2 +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@schema_evolution_data_2 +PREHOOK: query: CREATE TABLE part_change_various_various_boolean_to_bigint(insert_num int, + c1 TINYINT, c2 SMALLINT, c3 INT, c4 BIGINT, c5 FLOAT, c6 DOUBLE, c7 DECIMAL(38,18), c8 STRING, c9 
TIMESTAMP, + c10 BOOLEAN, c11 SMALLINT, c12 INT, c13 BIGINT, c14 FLOAT, c15 DOUBLE, c16 DECIMAL(38,18), c17 STRING, c18 CHAR(25), c19 VARCHAR(25), c20 TIMESTAMP, + c21 BOOLEAN, c22 TINYINT, c23 INT, c24 BIGINT, c25 FLOAT, c26 DOUBLE, c27 DECIMAL(38,18), c28 STRING, c29 CHAR(25), c30 VARCHAR(25), c31 TIMESTAMP, + c32 BOOLEAN, c33 TINYINT, c34 SMALLINT, c35 BIGINT, c36 FLOAT, c37 DOUBLE, c38 DECIMAL(38,18), c39 STRING, c40 CHAR(25), c41 VARCHAR(25), c42 TIMESTAMP, + c43 BOOLEAN, c44 TINYINT, c45 SMALLINT, c46 INT, c47 FLOAT, c48 DOUBLE, c49 DECIMAL(38,18), c50 STRING, c51 CHAR(25), c52 VARCHAR(25), c53 TIMESTAMP, + b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_various_various_boolean_to_bigint +POSTHOOK: query: CREATE TABLE part_change_various_various_boolean_to_bigint(insert_num int, + c1 TINYINT, c2 SMALLINT, c3 INT, c4 BIGINT, c5 FLOAT, c6 DOUBLE, c7 DECIMAL(38,18), c8 STRING, c9 TIMESTAMP, + c10 BOOLEAN, c11 SMALLINT, c12 INT, c13 BIGINT, c14 FLOAT, c15 DOUBLE, c16 DECIMAL(38,18), c17 STRING, c18 CHAR(25), c19 VARCHAR(25), c20 TIMESTAMP, + c21 BOOLEAN, c22 TINYINT, c23 INT, c24 BIGINT, c25 FLOAT, c26 DOUBLE, c27 DECIMAL(38,18), c28 STRING, c29 CHAR(25), c30 VARCHAR(25), c31 TIMESTAMP, + c32 BOOLEAN, c33 TINYINT, c34 SMALLINT, c35 BIGINT, c36 FLOAT, c37 DOUBLE, c38 DECIMAL(38,18), c39 STRING, c40 CHAR(25), c41 VARCHAR(25), c42 TIMESTAMP, + c43 BOOLEAN, c44 TINYINT, c45 SMALLINT, c46 INT, c47 FLOAT, c48 DOUBLE, c49 DECIMAL(38,18), c50 STRING, c51 CHAR(25), c52 VARCHAR(25), c53 TIMESTAMP, + b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_various_various_boolean_to_bigint +PREHOOK: query: insert into table part_change_various_various_boolean_to_bigint partition(part=1) SELECT insert_num, + tinyint1, smallint1, int1, bigint1, float1, double1, decimal1, boolean_str, timestamp1, + boolean1, smallint1, int1, bigint1, float1, double1, decimal1, tinyint_str, tinyint_str, tinyint_str, timestamp1, + boolean1, tinyint1, int1, bigint1, float1, double1, decimal1, smallint_str, smallint_str, smallint_str, timestamp1, + boolean1, tinyint1, smallint1, bigint1, float1, double1, decimal1, int_str, int_str, int_str, timestamp1, + boolean1, tinyint1, smallint1, int1, float1, double1, decimal1, bigint_str, bigint_str, bigint_str, timestamp1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_various_various_boolean_to_bigint@part=1 +POSTHOOK: query: insert into table part_change_various_various_boolean_to_bigint partition(part=1) SELECT insert_num, + tinyint1, smallint1, int1, bigint1, float1, double1, decimal1, boolean_str, timestamp1, + boolean1, smallint1, int1, bigint1, float1, double1, decimal1, tinyint_str, tinyint_str, tinyint_str, timestamp1, + boolean1, tinyint1, int1, bigint1, float1, double1, decimal1, smallint_str, smallint_str, smallint_str, timestamp1, + boolean1, tinyint1, smallint1, bigint1, float1, double1, decimal1, int_str, int_str, int_str, timestamp1, + boolean1, tinyint1, smallint1, int1, float1, double1, decimal1, bigint_str, bigint_str, bigint_str, timestamp1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_various_various_boolean_to_bigint@part=1 +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint 
PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c16 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c17 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c18 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c19 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c20 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c21 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c22 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c23 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c24 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c25 SIMPLE 
[(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c26 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c27 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c28 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c29 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c30 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c31 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c32 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c33 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c34 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c35 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c36 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c37 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c38 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c39 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c40 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int_str, type:string, comment:null), ] +POSTHOOK: Lineage: 
part_change_various_various_boolean_to_bigint PARTITION(part=1).c41 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c42 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c43 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c44 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c45 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c46 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c47 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c48 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c49 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c50 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c51 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c52 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c53 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, 
type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 _col17 _col18 _col19 _col20 _col21 _col22 _col23 _col24 _col25 _col26 _col27 _col28 _col29 _col30 _col31 _col32 _col33 _col34 _col35 _col36 _col37 _col38 _col39 _col40 _col41 _col42 _col43 _col44 _col45 _col46 _col47 _col48 _col49 _col50 _col51 _col52 _col53 _col54 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_various_various_boolean_to_bigint +PREHOOK: Input: default@part_change_various_various_boolean_to_bigint@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint +POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 c21 c22 c23 c24 c25 c26 c27 c28 c29 c30 c31 c32 c33 c34 c35 c36 c37 c38 c39 c40 c41 c42 c43 c44 c45 c46 c47 c48 c49 c50 c51 c52 c53 b +101 1 -128 NULL -2147483648 NULL Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 true 6229-06-28 02:54:28.970117179 true NULL -2147483648 NULL Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 -128 -128 -128 6229-06-28 02:54:28.970117179 true -128 -2147483648 NULL Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 -32768 -32768 -32768 6229-06-28 02:54:28.970117179 true -128 NULL NULL Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 -2147483648 -2147483648 -2147483648 6229-06-28 02:54:28.970117179 true -128 NULL -2147483648 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 -9223372036854775808 -9223372036854775808 -9223372036854775808 6229-06-28 02:54:28.970117179 original +102 1 127 32767 2147483647 9223372036854775807 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 false 5966-07-09 03:30:50.597 false 32767 2147483647 9223372036854775807 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 127 127 127 5966-07-09 03:30:50.597 false 127 2147483647 9223372036854775807 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 32767 32767 32767 5966-07-09 03:30:50.597 false 127 32767 9223372036854775807 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 2147483647 2147483647 2147483647 5966-07-09 03:30:50.597 false 127 32767 2147483647 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 9223372036854775807 9223372036854775807 9223372036854775807 5966-07-09 03:30:50.597 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL
NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 23 834 203332 888888857923222 -100.35978 30.774 66475.561431000000000000 true 1978-08-02 06:34:14 true 834 203332 888888857923222 -100.35978 30.774 66475.561431000000000000 23 23 23 1978-08-02 06:34:14 true 23 203332 888888857923222 -100.35978 30.774 66475.561431000000000000 834 834 834 1978-08-02 06:34:14 true 23 834 888888857923222 -100.35978 30.774 66475.561431000000000000 203332 203332 203332 1978-08-02 06:34:14 true 23 834 203332 -100.35978 30.774 66475.561431000000000000 888888857923222 888888857923222 888888857923222 1978-08-02 06:34:14 original +105 1 -99 -28300 -999992 -222282153733 NULL 46114.28 9250340.750000000000000000 false 1991-01-06 16:20:39.72036854 false -28300 -999992 -222282153733 NULL 46114.28 9250340.750000000000000000 -99 -99 -99 1991-01-06 16:20:39.72036854 false -99 -999992 -222282153733 NULL 46114.28 9250340.750000000000000000 -28300 -28300 -28300 1991-01-06 16:20:39.72036854 false -99 -28300 -222282153733 NULL 46114.28 9250340.750000000000000000 -999992 -999992 -999992 1991-01-06 16:20:39.72036854 false -99 -28300 -999992 NULL 46114.28 9250340.750000000000000000 -222282153733 -222282153733 -222282153733 1991-01-06 16:20:39.72036854 original +PREHOOK: query: alter table part_change_various_various_boolean_to_bigint replace columns (insert_num int, + c1 BOOLEAN, c2 BOOLEAN, c3 BOOLEAN, c4 BOOLEAN, c5 BOOLEAN, c6 BOOLEAN, c7 BOOLEAN, c8 BOOLEAN, c9 BOOLEAN, + c10 TINYINT, c11 TINYINT, c12 TINYINT, c13 TINYINT, c14 TINYINT, c15 TINYINT, c16 TINYINT, c17 TINYINT, c18 TINYINT, c19 TINYINT, c20 TINYINT, + c21 SMALLINT, c22 SMALLINT, c23 SMALLINT, c24 SMALLINT, c25 SMALLINT, c26 SMALLINT, c27 SMALLINT, c28 SMALLINT, c29 SMALLINT, c30 SMALLINT, c31 SMALLINT, + c32 INT, c33 INT, c34 INT, c35 INT, c36 INT, c37 INT, c38 INT, c39 INT, c40 INT, c41 INT, c42 INT, + c43 BIGINT, c44 BIGINT, c45 BIGINT, c46 BIGINT, c47 BIGINT, c48 BIGINT, c49 BIGINT, c50 BIGINT, c51 BIGINT, c52 BIGINT, c53 BIGINT, + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_various_various_boolean_to_bigint +PREHOOK: Output: default@part_change_various_various_boolean_to_bigint +POSTHOOK: query: alter table part_change_various_various_boolean_to_bigint replace columns (insert_num int, + c1 BOOLEAN, c2 BOOLEAN, c3 BOOLEAN, c4 BOOLEAN, c5 BOOLEAN, c6 BOOLEAN, c7 BOOLEAN, c8 BOOLEAN, c9 BOOLEAN, + c10 TINYINT, c11 TINYINT, c12 TINYINT, c13 TINYINT, c14 TINYINT, c15 TINYINT, c16 TINYINT, c17 TINYINT, c18 TINYINT, c19 TINYINT, c20 TINYINT, + c21 SMALLINT, c22 SMALLINT, c23 SMALLINT, c24 SMALLINT, c25 SMALLINT, c26 SMALLINT, c27 SMALLINT, c28 SMALLINT, c29 SMALLINT, c30 SMALLINT, c31 SMALLINT, + c32 INT, c33 INT, c34 INT, c35 INT, c36 INT, c37 INT, c38 INT, c39 INT, c40 INT, c41 INT, c42 INT, + c43 BIGINT, c44 BIGINT, c45 BIGINT, c46 BIGINT, c47 BIGINT, c48 BIGINT, c49 BIGINT, c50 BIGINT, c51 BIGINT, c52 BIGINT, c53 BIGINT, + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint +POSTHOOK: Output: default@part_change_various_various_boolean_to_bigint +PREHOOK: query: insert into table part_change_various_various_boolean_to_bigint partition(part=1) SELECT insert_num, + boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + 
smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, int1, int1, int1, int1, int1, int1, int1, + bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, + 'new' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_various_various_boolean_to_bigint@part=1 +POSTHOOK: query: insert into table part_change_various_various_boolean_to_bigint partition(part=1) SELECT insert_num, + boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, boolean1, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, int1, int1, int1, int1, int1, int1, int1, + bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, bigint1, + 'new' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_various_various_boolean_to_bigint@part=1 +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c16 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c17 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c18 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c19 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, 
comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c20 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c21 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c22 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c23 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c24 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c25 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c26 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c27 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c28 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c29 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c30 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c31 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c32 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c33 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c34 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c35 SIMPLE 
[(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c36 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c37 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c38 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c39 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c40 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c41 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c42 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c43 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c44 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c45 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c46 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c47 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c48 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c49 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c50 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c51 SIMPLE 
[(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c52 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c53 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_boolean_to_bigint PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num boolean1 boolean1 boolean1 boolean1 boolean1 boolean1 boolean1 boolean1 boolean1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 smallint1 smallint1 smallint1 smallint1 smallint1 smallint1 smallint1 smallint1 smallint1 smallint1 smallint1 int1 int1 int1 int1 int1 int1 int1 int1 int1 int1 int1 bigint1 bigint1 bigint1 bigint1 bigint1 bigint1 bigint1 bigint1 bigint1 bigint1 bigint1 _c54 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_various_various_boolean_to_bigint + Statistics: Num rows: 10 Data size: 1228 Basic stats: COMPLETE Column stats: PARTIAL + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: boolean), c2 (type: boolean), c3 (type: boolean), c4 (type: boolean), c5 (type: boolean), c6 (type: boolean), c7 (type: boolean), c8 (type: boolean), c9 (type: boolean), c10 (type: tinyint), c11 (type: tinyint), c12 (type: tinyint), c13 (type: tinyint), c14 (type: tinyint), c15 (type: tinyint), c16 (type: tinyint), c17 (type: tinyint), c18 (type: 
tinyint), c19 (type: tinyint), c20 (type: tinyint), c21 (type: smallint), c22 (type: smallint), c23 (type: smallint), c24 (type: smallint), c25 (type: smallint), c26 (type: smallint), c27 (type: smallint), c28 (type: smallint), c29 (type: smallint), c30 (type: smallint), c31 (type: smallint), c32 (type: int), c33 (type: int), c34 (type: int), c35 (type: int), c36 (type: int), c37 (type: int), c38 (type: int), c39 (type: int), c40 (type: int), c41 (type: int), c42 (type: int), c43 (type: bigint), c44 (type: bigint), c45 (type: bigint), c46 (type: bigint), c47 (type: bigint), c48 (type: bigint), c49 (type: bigint), c50 (type: bigint), c51 (type: bigint), c52 (type: bigint), c53 (type: bigint), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32, _col33, _col34, _col35, _col36, _col37, _col38, _col39, _col40, _col41, _col42, _col43, _col44, _col45, _col46, _col47, _col48, _col49, _col50, _col51, _col52, _col53, _col54, _col55 + Statistics: Num rows: 10 Data size: 1228 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + Statistics: Num rows: 10 Data size: 1228 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: no inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_various_various_boolean_to_bigint +PREHOOK: Input: default@part_change_various_various_boolean_to_bigint@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,c34,c35,c36,c37,c38,c39,c40,c41,c42,c43,c44,c45,c46,c47,c48,c49,c50,c51,c52,c53,b from part_change_various_various_boolean_to_bigint +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint +POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 c21 c22 c23 c24 c25 c26 c27 c28 c29 c30 c31 c32 c33 c34 c35 c36 c37 c38 c39 c40 c41 c42 c43 c44 c45 c46 c47 c48 c49 c50 c51 c52 c53 b +101 1 NULL NULL NULL NULL NULL NULL NULL true NULL NULL -128 -128 -128 -128 -128 -128 -128 -128 -128 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL -2147483648 -2147483648 -2147483648 -2147483648 -2147483648 -2147483648 -2147483648 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL new +101 1 true NULL true NULL true true true true true 1 NULL 0 NULL -1 -1 NULL -128 -128 -128 84 1 -128 0 NULL -1 -1 NULL NULL NULL NULL -8620 1 -128 NULL NULL 2147483647 2147483647 NULL -2147483648 -2147483648 -2147483648 1272503892 1 -128 NULL -2147483648 
9223372036854775807 9223372036854775807 NULL NULL NULL NULL 134416490068 original +102 1 NULL NULL NULL NULL NULL NULL NULL false NULL NULL 127 127 127 127 127 127 127 127 127 NULL NULL NULL 32767 32767 32767 32767 32767 32767 32767 32767 NULL NULL NULL NULL 2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 NULL NULL NULL NULL NULL 9223372036854775807 9223372036854775807 9223372036854775807 9223372036854775807 9223372036854775807 9223372036854775807 NULL new +102 1 true true true true true true true false true 0 -1 -1 -1 0 0 NULL 127 127 127 -38 0 127 -1 -1 0 0 NULL 32767 32767 32767 7898 0 127 32767 -1 -2147483648 -2147483648 NULL 2147483647 2147483647 2147483647 1563893466 0 127 32767 2147483647 -9223372036854775808 -9223372036854775808 NULL 9223372036854775807 9223372036854775807 9223372036854775807 126117945050 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL new +103 1 NULL NULL NULL NULL NULL NULL NULL false NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 NULL NULL NULL NULL NULL NULL NULL true NULL NULL 23 23 23 23 23 23 23 23 23 NULL NULL NULL 834 834 834 834 834 834 834 834 NULL NULL NULL NULL 203332 203332 203332 203332 203332 203332 203332 NULL NULL NULL NULL NULL 888888847499264 888888857923222 888888857923222 888888857923222 888888857923222 888888857923222 NULL new +104 1 true true true true true true true true true 1 66 68 -106 -100 30 NULL 23 23 23 86 1 23 6724 3734 -100 30 NULL 834 834 834 -12970 1 23 834 -1868624234 -100 30 66475 203332 203332 203332 270912854 1 23 834 203332 -100 30 66475 888888857923222 888888857923222 888888857923222 270912854 original +105 1 NULL NULL NULL NULL NULL NULL NULL false NULL NULL -99 -99 -99 -99 -99 -99 -99 -99 -99 NULL NULL NULL -28300 -28300 -28300 -28300 -28300 -28300 -28300 -28300 NULL NULL NULL NULL -999992 -999992 -999992 -999992 -999992 -999992 -999992 NULL NULL NULL NULL NULL -222282153984 -222282153733 -222282153733 -222282153733 -222282153733 -222282153733 NULL new +105 1 true true true true NULL true true false true 0 116 -56 -5 NULL 34 NULL -99 -99 -99 -41 0 -99 -16952 -32517 NULL -19422 NULL -28300 -28300 -28300 -16681 0 -99 -28300 1056145659 NULL 46114 9250340 -999992 -999992 -999992 663207639 0 -99 -28300 -999992 NULL 46114 9250340 -222282153733 -222282153733 -222282153733 663207639 original +PREHOOK: query: drop table part_change_various_various_boolean_to_bigint +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_various_various_boolean_to_bigint +PREHOOK: Output: default@part_change_various_various_boolean_to_bigint +POSTHOOK: query: drop table part_change_various_various_boolean_to_bigint +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint +POSTHOOK: Output: default@part_change_various_various_boolean_to_bigint +PREHOOK: query: CREATE TABLE part_change_various_various_decimal_to_double(insert_num int, + c1 BOOLEAN, c2 TINYINT, c3 SMALLINT, c4 INT, c5 BIGINT, c6 FLOAT, c7 DOUBLE, c8 STRING, c9 CHAR(25), c10 VARCHAR(25), c11 TIMESTAMP, + c12 BOOLEAN, c13 TINYINT, c14 SMALLINT, c15 INT, c16 BIGINT, c17 DECIMAL(38,18), c18 
DOUBLE, c19 STRING, c20 CHAR(25), c21 VARCHAR(25), c22 TIMESTAMP, + c23 BOOLEAN, c24 TINYINT, c25 SMALLINT, c26 INT, c27 BIGINT, c28 DECIMAL(38,18), c29 FLOAT, c30 STRING, c31 CHAR(25), c32 VARCHAR(25), c33 TIMESTAMP, + b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_various_various_decimal_to_double +POSTHOOK: query: CREATE TABLE part_change_various_various_decimal_to_double(insert_num int, + c1 BOOLEAN, c2 TINYINT, c3 SMALLINT, c4 INT, c5 BIGINT, c6 FLOAT, c7 DOUBLE, c8 STRING, c9 CHAR(25), c10 VARCHAR(25), c11 TIMESTAMP, + c12 BOOLEAN, c13 TINYINT, c14 SMALLINT, c15 INT, c16 BIGINT, c17 DECIMAL(38,18), c18 DOUBLE, c19 STRING, c20 CHAR(25), c21 VARCHAR(25), c22 TIMESTAMP, + c23 BOOLEAN, c24 TINYINT, c25 SMALLINT, c26 INT, c27 BIGINT, c28 DECIMAL(38,18), c29 FLOAT, c30 STRING, c31 CHAR(25), c32 VARCHAR(25), c33 TIMESTAMP, + b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_various_various_decimal_to_double +PREHOOK: query: insert into table part_change_various_various_decimal_to_double partition(part=1) SELECT insert_num, + boolean1, tinyint1, smallint1, int1, bigint1, float1, double1, decimal_str, decimal_str, decimal_str, timestamp1, + boolean1, tinyint1, smallint1, int1, bigint1, decimal1, double1, float_str, float_str, float_str, timestamp1, + boolean1, tinyint1, smallint1, int1, bigint1, decimal1, float1, double_str, double_str, double_str, timestamp1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_various_various_decimal_to_double@part=1 +POSTHOOK: query: insert into table part_change_various_various_decimal_to_double partition(part=1) SELECT insert_num, + boolean1, tinyint1, smallint1, int1, bigint1, float1, double1, decimal_str, decimal_str, decimal_str, timestamp1, + boolean1, tinyint1, smallint1, int1, bigint1, decimal1, double1, float_str, float_str, float_str, timestamp1, + boolean1, tinyint1, smallint1, int1, bigint1, decimal1, float1, double_str, double_str, double_str, timestamp1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_various_various_decimal_to_double@part=1 +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c10 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: 
part_change_various_various_decimal_to_double PARTITION(part=1).c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c16 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c17 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c18 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c19 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c20 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c21 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c22 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c23 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c24 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c25 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c26 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c27 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c28 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c29 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, 
comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c30 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c31 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c32 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c33 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c9 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 _col17 _col18 _col19 _col20 _col21 _col22 _col23 _col24 _col25 _col26 _col27 _col28 _col29 _col30 _col31 _col32 _col33 _col34 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_various_various_decimal_to_double +PREHOOK: Input: default@part_change_various_various_decimal_to_double@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_various_various_decimal_to_double +POSTHOOK: Input: default@part_change_various_various_decimal_to_double@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 c21 c22 c23 c24 c25 c26 c27 c28 c29 c30 c31 c32 c33 b +101 1 true -128 NULL -2147483648 NULL Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 
99999999999999999999.9999 99999999999999999999.9999 6229-06-28 02:54:28.970117179 true -128 NULL -2147483648 NULL 99999999999999999999.999999999999999999 1.7976931348623157E308 340282347000000000000000000000000000000000 3402823470000000000000000 3402823470000000000000000 6229-06-28 02:54:28.970117179 true -128 NULL -2147483648 NULL 99999999999999999999.999999999999999999 Infinity 1.79769313486231570E+308 1.79769313486231570E+308 1.79769313486231570E+308 6229-06-28 02:54:28.970117179 original +102 1 false 127 32767 2147483647 9223372036854775807 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -99999999999999999999.999 -99999999999999999999.999 5966-07-09 03:30:50.597 false 127 32767 2147483647 9223372036854775807 -99999999999999999999.999999999999999999 -1.7976931348623157E308 -340282347000000000000000000000000000000000 -340282347000000000000000 -340282347000000000000000 5966-07-09 03:30:50.597 false 127 32767 2147483647 9223372036854775807 -99999999999999999999.999999999999999999 -Infinity -1.79769313486231570E+308 -1.79769313486231570E+308 -1.79769313486231570E+308 5966-07-09 03:30:50.597 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 true 23 834 203332 888888857923222 -100.35978 30.774 66475.561431 66475.561431 66475.561431 1978-08-02 06:34:14 true 23 834 203332 888888857923222 66475.561431000000000000 30.774 -100.3597812 -100.3597812 -100.3597812 1978-08-02 06:34:14 true 23 834 203332 888888857923222 66475.561431000000000000 -100.35978 30.774 30.774 30.774 1978-08-02 06:34:14 original +105 1 false -99 -28300 -999992 -222282153733 NULL 46114.28 9250340.75 9250340.75 9250340.75 1991-01-06 16:20:39.72036854 false -99 -28300 -999992 -222282153733 9250340.750000000000000000 46114.28 -32768 -32768 -32768 1991-01-06 16:20:39.72036854 false -99 -28300 -999992 -222282153733 9250340.750000000000000000 NULL 46114.28 46114.28 46114.28 1991-01-06 16:20:39.72036854 original +PREHOOK: query: alter table part_change_various_various_decimal_to_double replace columns (insert_num int, + c1 DECIMAL(38,18), c2 DECIMAL(38,18), c3 DECIMAL(38,18), c4 DECIMAL(38,18), c5 DECIMAL(38,18), c6 DECIMAL(38,18), c7 DECIMAL(38,18), c8 DECIMAL(38,18), c9 DECIMAL(38,18), c10 DECIMAL(38,18), c11 DECIMAL(38,18), + c12 FLOAT, c13 FLOAT, c14 FLOAT, c15 FLOAT, c16 FLOAT, c17 FLOAT, c18 FLOAT, c19 FLOAT, c20 FLOAT, c21 FLOAT, c22 FLOAT, + c23 DOUBLE, c24 DOUBLE, c25 DOUBLE, c26 DOUBLE, c27 DOUBLE, c28 DOUBLE, c29 DOUBLE, c30 DOUBLE, c31 DOUBLE, c32 DOUBLE, c33 DOUBLE, + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_various_various_decimal_to_double +PREHOOK: Output: default@part_change_various_various_decimal_to_double +POSTHOOK: query: alter table part_change_various_various_decimal_to_double replace columns (insert_num int, + c1 DECIMAL(38,18), c2 DECIMAL(38,18), c3 DECIMAL(38,18), c4 DECIMAL(38,18), c5 DECIMAL(38,18), c6 DECIMAL(38,18), c7 DECIMAL(38,18), c8 DECIMAL(38,18), c9 DECIMAL(38,18), c10 DECIMAL(38,18), c11 DECIMAL(38,18), + c12 FLOAT, c13 FLOAT, c14 FLOAT, c15 FLOAT, c16 FLOAT, c17 FLOAT, c18 FLOAT, c19 FLOAT, c20 FLOAT, c21 FLOAT, c22 FLOAT, + c23 DOUBLE, c24 DOUBLE, c25 DOUBLE, c26 DOUBLE, c27 DOUBLE, c28 DOUBLE, c29 DOUBLE, c30 DOUBLE, c31 DOUBLE, c32 DOUBLE, c33 DOUBLE, + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_various_various_decimal_to_double +POSTHOOK: Output:
default@part_change_various_various_decimal_to_double +PREHOOK: query: insert into table part_change_various_various_decimal_to_double partition(part=1) SELECT insert_num, + decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, + float1, float1, float1, float1, float1, float1, float1, float1, float1, float1, float1, + double1, double1, double1, double1, double1, double1, double1, double1, double1, double1, double1, + 'new' FROM schema_evolution_data_2 WHERE insert_num=111 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@part_change_various_various_decimal_to_double@part=1 +POSTHOOK: query: insert into table part_change_various_various_decimal_to_double partition(part=1) SELECT insert_num, + decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, decimal1, + float1, float1, float1, float1, float1, float1, float1, float1, float1, float1, float1, + double1, double1, double1, double1, double1, double1, double1, double1, double1, double1, double1, + 'new' FROM schema_evolution_data_2 WHERE insert_num=111 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@part_change_various_various_decimal_to_double@part=1 +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c1 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c10 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c11 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c12 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c13 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c14 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c15 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c16 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c17 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c18 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c19 SIMPLE 
[(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c2 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c20 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c21 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c22 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c23 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c24 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c25 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c26 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c27 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c28 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c29 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c3 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c30 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c31 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c32 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c33 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c4 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: 
Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c5 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c6 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c7 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c8 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).c9 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_decimal_to_double PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num decimal1 decimal1 decimal1 decimal1 decimal1 decimal1 decimal1 decimal1 decimal1 decimal1 decimal1 float1 float1 float1 float1 float1 float1 float1 float1 float1 float1 float1 double1 double1 double1 double1 double1 double1 double1 double1 double1 double1 double1 _c34 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_various_various_decimal_to_double + Statistics: Num rows: 6 Data size: 1320 Basic stats: COMPLETE Column stats: PARTIAL + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: decimal(38,18)), c2 (type: decimal(38,18)), c3 (type: decimal(38,18)), c4 (type: decimal(38,18)), c5 (type: decimal(38,18)), c6 (type: decimal(38,18)), c7 (type: decimal(38,18)), c8 (type: decimal(38,18)), c9 (type: decimal(38,18)), c10 (type: decimal(38,18)), c11 (type: decimal(38,18)), c12 (type: float), c13 (type: float), c14 (type: float), c15 (type: float), c16 (type: float), c17 (type: float), c18 (type: float), c19 (type: float), c20 (type: float), c21 (type: float), c22 (type: float), c23 (type: double), c24 (type: double), c25 (type: double), c26 (type: double), c27 (type: double), c28 (type: double), c29 (type: double), c30 (type: double), c31 (type: double), c32 (type: double), c33 (type: double), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, 
_col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32, _col33, _col34, _col35 + Statistics: Num rows: 6 Data size: 1320 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 1320 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: no inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_various_various_decimal_to_double +PREHOOK: Input: default@part_change_various_various_decimal_to_double@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,c21,c22,c23,c24,c25,c26,c27,c28,c29,c30,c31,c32,c33,b from part_change_various_various_decimal_to_double +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_various_various_decimal_to_double +POSTHOOK: Input: default@part_change_various_various_decimal_to_double@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 c21 c22 c23 c24 c25 c26 c27 c28 c29 c30 c31 c32 c33 b +101 1 1.000000000000000000 -128.000000000000000000 NULL -2147483648.000000000000000000 NULL NULL NULL 99999999999999999999.999999999999999999 99999999999999999999.999900000000000000 99999999999999999999.999900000000000000 134416490068.970120000000000000 1.0 -128.0 NULL -2.14748365E9 NULL 1.0E20 Infinity Infinity 3.4028236E24 3.4028236E24 1.3441649E11 1.0 -128.0 NULL -2.147483648E9 NULL 1.0E20 Infinity 1.7976931348623157E308 1.7976931348623157E308 1.7976931348623157E308 1.3441649006897012E11 original +102 1 0.000000000000000000 127.000000000000000000 32767.000000000000000000 2147483647.000000000000000000 9223372036854775807.000000000000000000 NULL NULL -99999999999999999999.999999999999999999 -99999999999999999999.999000000000000000 -99999999999999999999.999000000000000000 126117945050.597000000000000000 0.0 127.0 32767.0 2.14748365E9 9.223372E18 -1.0E20 -Infinity -Infinity -3.4028233E23 -3.4028233E23 1.26117945E11 0.0 127.0 32767.0 2.147483647E9 9.223372036854776E18 -1.0E20 -Infinity -1.7976931348623157E308 -1.7976931348623157E308 -1.7976931348623157E308 1.26117945050597E11 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 1.000000000000000000 23.000000000000000000 834.000000000000000000 203332.000000000000000000 888888857923222.000000000000000000 -100.359780000000000000 30.774000000000000000 66475.561431000000000000 66475.561431000000000000 66475.561431000000000000 270912854.000000000000000000 1.0 23.0 834.0 203332.0 8.8888885E14 66475.56 30.774 -100.35978 -100.35978 -100.35978 2.70912864E8 1.0 23.0 834.0 203332.0 8.88888857923222E14 66475.561431 -100.35977935791016 30.774 30.774 30.774 2.70912854E8 original +105 1 0.000000000000000000 -99.000000000000000000 -28300.000000000000000000 -999992.000000000000000000 
-222282153733.000000000000000000 NULL 46114.280000000000000000 9250340.750000000000000000 9250340.750000000000000000 9250340.750000000000000000 663207639.720368500000000000 0.0 -99.0 -28300.0 -999992.0 -2.22282154E11 9250341.0 46114.28 NULL NULL NULL 6.6320762E8 0.0 -99.0 -28300.0 -999992.0 -2.22282153733E11 9250340.75 NULL 46114.28 46114.28 46114.28 6.632076397203685E8 original +111 1 NULL NULL NULL -46114.000000000000000000 -46114.000000000000000000 -46114.285000000000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 NULL NULL NULL NULL NULL NULL -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 NULL NULL NULL NULL NULL NULL -9.0E-8 -9.000000034120603E-8 -9.0E-8 -9.0E-8 -9.0E-8 NULL new +PREHOOK: query: drop table part_change_various_various_decimal_to_double +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_various_various_decimal_to_double +PREHOOK: Output: default@part_change_various_various_decimal_to_double +POSTHOOK: query: drop table part_change_various_various_decimal_to_double +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_various_various_decimal_to_double +POSTHOOK: Output: default@part_change_various_various_decimal_to_double +PREHOOK: query: CREATE TABLE part_change_various_various_timestamp(insert_num int, c1 BOOLEAN, c2 TINYINT, c3 SMALLINT, c4 INT, c5 BIGINT, c6 FLOAT, c7 DOUBLE, c8 DECIMAL(38,18), c9 STRING, c10 CHAR(25), c11 VARCHAR(25), c12 DATE, b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_various_various_timestamp +POSTHOOK: query: CREATE TABLE part_change_various_various_timestamp(insert_num int, c1 BOOLEAN, c2 TINYINT, c3 SMALLINT, c4 INT, c5 BIGINT, c6 FLOAT, c7 DOUBLE, c8 DECIMAL(38,18), c9 STRING, c10 CHAR(25), c11 VARCHAR(25), c12 DATE, b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_various_various_timestamp +PREHOOK: query: insert into table part_change_various_various_timestamp partition(part=1) SELECT insert_num, boolean1, tinyint1, smallint1, int1, bigint1, float1, double1, decimal1, timestamp_str, timestamp_str, timestamp_str, date1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_various_various_timestamp@part=1 +POSTHOOK: query: insert into table part_change_various_various_timestamp partition(part=1) SELECT insert_num, boolean1, tinyint1, smallint1, int1, bigint1, float1, double1, decimal1, timestamp_str, timestamp_str, timestamp_str, date1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_various_various_timestamp@part=1 +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:boolean1, type:boolean, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c10 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c11 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp_str, type:string, comment:null), ] +POSTHOOK: 
Lineage: part_change_various_various_timestamp PARTITION(part=1).c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_various_various_timestamp +PREHOOK: Input: default@part_change_various_various_timestamp@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_various_various_timestamp +POSTHOOK: Input: default@part_change_various_various_timestamp@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 b +101 1 true -128 NULL -2147483648 NULL Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.97011 6229-06-28 02:54:28.97011 1950-12-18 original +102 1 false 127 32767 2147483647 9223372036854775807 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 2049-12-18 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 true 23 834 203332 888888857923222 -100.35978 30.774 66475.561431000000000000 1978-08-02 06:34:14 1978-08-02 06:34:14 1978-08-02 06:34:14 2021-09-24 original +105 1 false -99 -28300 -999992 -222282153733 NULL 46114.28 9250340.750000000000000000 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036 1991-01-06 16:20:39.72036 2024-11-11 original +PREHOOK: query: alter table 
part_change_various_various_timestamp replace columns (insert_num int, c1 TIMESTAMP, c2 TIMESTAMP, c3 TIMESTAMP, c4 TIMESTAMP, c5 TIMESTAMP, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, c11 TIMESTAMP, c12 TIMESTAMP, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_various_various_timestamp +PREHOOK: Output: default@part_change_various_various_timestamp +POSTHOOK: query: alter table part_change_various_various_timestamp replace columns (insert_num int, c1 TIMESTAMP, c2 TIMESTAMP, c3 TIMESTAMP, c4 TIMESTAMP, c5 TIMESTAMP, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, c11 TIMESTAMP, c12 TIMESTAMP, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_various_various_timestamp +POSTHOOK: Output: default@part_change_various_various_timestamp +PREHOOK: query: insert into table part_change_various_various_timestamp partition(part=1) SELECT insert_num, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'new' FROM schema_evolution_data_2 WHERE insert_num=111 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@part_change_various_various_timestamp@part=1 +POSTHOOK: query: insert into table part_change_various_various_timestamp partition(part=1) SELECT insert_num, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'new' FROM schema_evolution_data_2 WHERE insert_num=111 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@part_change_various_various_timestamp@part=1 +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c1 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c10 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c11 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c12 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c2 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c3 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c4 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c5 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c6 SIMPLE 
[(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c7 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c8 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).c9 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_timestamp PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 _c13 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_various_various_timestamp + Statistics: Num rows: 6 Data size: 4915 Basic stats: COMPLETE Column stats: PARTIAL + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: timestamp), c2 (type: timestamp), c3 (type: timestamp), c4 (type: timestamp), c5 (type: timestamp), c6 (type: timestamp), c7 (type: timestamp), c8 (type: timestamp), c9 (type: timestamp), c10 (type: timestamp), c11 (type: timestamp), c12 (type: timestamp), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14 + Statistics: Num rows: 6 Data size: 4032 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 4032 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: no inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_various_various_timestamp +PREHOOK: Input: default@part_change_various_various_timestamp@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,b from part_change_various_various_timestamp +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_various_various_timestamp +POSTHOOK: Input: default@part_change_various_various_timestamp@part=1 +#### A masked pattern was here #### 
+insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 b +101 1 1969-12-31 16:00:00.001 1969-12-31 15:59:59.872 NULL 1969-12-06 19:28:36.352 NULL NULL NULL NULL 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.97011 6229-06-28 02:54:28.97011 1950-12-18 00:00:00 original +102 1 1969-12-31 16:00:00 1969-12-31 16:00:00.127 1969-12-31 16:00:32.767 1970-01-25 12:31:23.647 NULL NULL 1969-12-31 16:00:00 NULL 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 2049-12-18 00:00:00 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 1969-12-31 16:00:00.001 1969-12-31 16:00:00.023 1969-12-31 16:00:00.834 1969-12-31 16:03:23.332 NULL 1969-12-31 15:58:19.640220643 1969-12-31 16:00:30.774 1970-01-01 10:27:55.561431 1978-08-02 06:34:14 1978-08-02 06:34:14 1978-08-02 06:34:14 2021-09-24 00:00:00 original +105 1 1969-12-31 16:00:00 1969-12-31 15:59:59.901 1969-12-31 15:59:31.7 1969-12-31 15:43:20.008 1962-12-15 22:57:26.267 NULL 1970-01-01 04:48:34.28 1970-04-17 17:32:20.75 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036 1991-01-06 16:20:39.72036 2024-11-11 00:00:00 original +111 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL new +PREHOOK: query: drop table part_change_various_various_timestamp +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_various_various_timestamp +PREHOOK: Output: default@part_change_various_various_timestamp +POSTHOOK: query: drop table part_change_various_various_timestamp +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_various_various_timestamp +POSTHOOK: Output: default@part_change_various_various_timestamp +PREHOOK: query: CREATE TABLE part_change_various_various_date(insert_num int, c1 STRING, c2 CHAR(25), c3 VARCHAR(25), c4 TIMESTAMP, b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_various_various_date +POSTHOOK: query: CREATE TABLE part_change_various_various_date(insert_num int, c1 STRING, c2 CHAR(25), c3 VARCHAR(25), c4 TIMESTAMP, b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_various_various_date +PREHOOK: query: insert into table part_change_various_various_date partition(part=1) SELECT insert_num, date_str, date_str, date_str, timestamp1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_various_various_date@part=1 +POSTHOOK: query: insert into table part_change_various_various_date partition(part=1) SELECT insert_num, date_str, date_str, date_str, timestamp1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_various_various_date@part=1 +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).c2 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).c3 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date_str, type:string, comment:null), ] +POSTHOOK: Lineage: 
part_change_various_various_date PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +_col0 _col1 _col2 _col3 _col4 _col5 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_various_various_date +PREHOOK: Input: default@part_change_various_various_date@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_various_various_date +POSTHOOK: Input: default@part_change_various_various_date@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 b +101 1 1950-12-18 1950-12-18 1950-12-18 6229-06-28 02:54:28.970117179 original +102 1 2049-12-18 2049-12-18 2049-12-18 5966-07-09 03:30:50.597 original +103 1 NULL original +104 1 2021-09-24 2021-09-24 2021-09-24 1978-08-02 06:34:14 original +105 1 2024-11-11 2024-11-11 2024-11-11 1991-01-06 16:20:39.72036854 original +PREHOOK: query: alter table part_change_various_various_date replace columns (insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_various_various_date +PREHOOK: Output: default@part_change_various_various_date +POSTHOOK: query: alter table part_change_various_various_date replace columns (insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_various_various_date +POSTHOOK: Output: default@part_change_various_various_date +PREHOOK: query: insert into table part_change_various_various_date partition(part=1) SELECT insert_num, date1, date1, date1, date1, 'new' FROM schema_evolution_data_2 WHERE insert_num=111 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data_2 +PREHOOK: Output: default@part_change_various_various_date@part=1 +POSTHOOK: query: insert into table part_change_various_various_date partition(part=1) SELECT insert_num, date1, date1, date1, date1, 'new' FROM schema_evolution_data_2 WHERE insert_num=111 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data_2 +POSTHOOK: Output: default@part_change_various_various_date@part=1 +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).c1 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).c2 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).c3 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).c4 SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_various_various_date PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data_2)schema_evolution_data_2.FieldSchema(name:insert_num, type:int, 
comment:null), ] +insert_num date1 date1 date1 date1 _c5 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_various_various_date + Statistics: Num rows: 6 Data size: 2496 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: date), c2 (type: date), c3 (type: date), c4 (type: date), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 + Statistics: Num rows: 6 Data size: 2496 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 2496 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: no inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_various_various_date +PREHOOK: Input: default@part_change_various_various_date@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,b from part_change_various_various_date +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_various_various_date +POSTHOOK: Input: default@part_change_various_various_date@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 b +101 1 1950-12-18 1950-12-18 1950-12-18 6229-06-28 original +102 1 2049-12-18 2049-12-18 2049-12-18 5966-07-09 original +103 1 NULL NULL NULL NULL original +104 1 2021-09-24 2021-09-24 2021-09-24 1978-08-02 original +105 1 2024-11-11 2024-11-11 2024-11-11 1991-01-06 original +111 1 1964-01-24 1964-01-24 1964-01-24 NULL new +PREHOOK: query: drop table part_change_various_various_date +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_various_various_date +PREHOOK: Output: default@part_change_various_various_date +POSTHOOK: query: drop table part_change_various_various_date +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_various_various_date +POSTHOOK: Output: default@part_change_various_various_date +PREHOOK: query: CREATE TABLE part_change_same_type_different_params(insert_num int, c1 CHAR(12), c2 CHAR(25), c3 VARCHAR(25), c4 VARCHAR(10), c5 DECIMAL(12,4), c6 DECIMAL(20,10), b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_same_type_different_params +POSTHOOK: query: CREATE TABLE part_change_same_type_different_params(insert_num int, c1 CHAR(12), c2 CHAR(25), c3 VARCHAR(25), c4 VARCHAR(10), c5 DECIMAL(12,4), c6 DECIMAL(20,10), b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: 
default@part_change_same_type_different_params +PREHOOK: query: CREATE TABLE same_type1_a_txt(insert_num int, c1 CHAR(12), c2 CHAR(25), c3 VARCHAR(25), c4 VARCHAR(10), c5 DECIMAL(12,4), c6 DECIMAL(20,10), b STRING) +row format delimited fields terminated by '|' +stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@same_type1_a_txt +POSTHOOK: query: CREATE TABLE same_type1_a_txt(insert_num int, c1 CHAR(12), c2 CHAR(25), c3 VARCHAR(25), c4 VARCHAR(10), c5 DECIMAL(12,4), c6 DECIMAL(20,10), b STRING) +row format delimited fields terminated by '|' +stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@same_type1_a_txt +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/same_type1_a.txt' overwrite into table same_type1_a_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@same_type1_a_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/same_type1_a.txt' overwrite into table same_type1_a_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@same_type1_a_txt +PREHOOK: query: insert into table part_change_same_type_different_params partition(part=1) select * from same_type1_a_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@same_type1_a_txt +PREHOOK: Output: default@part_change_same_type_different_params@part=1 +POSTHOOK: query: insert into table part_change_same_type_different_params partition(part=1) select * from same_type1_a_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@same_type1_a_txt +POSTHOOK: Output: default@part_change_same_type_different_params@part=1 +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).b SIMPLE [(same_type1_a_txt)same_type1_a_txt.FieldSchema(name:b, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c1 SIMPLE [(same_type1_a_txt)same_type1_a_txt.FieldSchema(name:c1, type:char(12), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c2 SIMPLE [(same_type1_a_txt)same_type1_a_txt.FieldSchema(name:c2, type:char(25), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c3 SIMPLE [(same_type1_a_txt)same_type1_a_txt.FieldSchema(name:c3, type:varchar(25), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c4 SIMPLE [(same_type1_a_txt)same_type1_a_txt.FieldSchema(name:c4, type:varchar(10), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c5 SIMPLE [(same_type1_a_txt)same_type1_a_txt.FieldSchema(name:c5, type:decimal(12,4), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c6 SIMPLE [(same_type1_a_txt)same_type1_a_txt.FieldSchema(name:c6, type:decimal(20,10), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).insert_num SIMPLE [(same_type1_a_txt)same_type1_a_txt.FieldSchema(name:insert_num, type:int, comment:null), ] +same_type1_a_txt.insert_num same_type1_a_txt.c1 same_type1_a_txt.c2 same_type1_a_txt.c3 same_type1_a_txt.c4 same_type1_a_txt.c5 same_type1_a_txt.c6 same_type1_a_txt.b +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_same_type_different_params +PREHOOK: Input: 
default@part_change_same_type_different_params@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_same_type_different_params +POSTHOOK: Input: default@part_change_same_type_different_params@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 b +1 1 excess corporation believable gateway thumb repe 76855494.3900 NULL original +2 1 mutation hideout authentic blankness sonic disfigure hover know 42093605.0000 3182652.4066800000 original +3 1 hibernation country detox coyotes ball pulse candle direct 1413570.3000 8.3095808155 original +4 1 aquarium encounter existent amuse fuse light headlock bumper adm 5475414.6500 25937.6752563004 original +5 1 123456789012 1234567890123456789012345 1234567890123456789012345 1234567890 12345678.1234 1234567890.0987654321 original +PREHOOK: query: alter table part_change_same_type_different_params replace columns (insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_same_type_different_params +PREHOOK: Output: default@part_change_same_type_different_params +POSTHOOK: query: alter table part_change_same_type_different_params replace columns (insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_same_type_different_params +POSTHOOK: Output: default@part_change_same_type_different_params +PREHOOK: query: CREATE TABLE same_type1_b_txt(insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING) +row format delimited fields terminated by '|' +stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@same_type1_b_txt +POSTHOOK: query: CREATE TABLE same_type1_b_txt(insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING) +row format delimited fields terminated by '|' +stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@same_type1_b_txt +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/same_type1_b.txt' overwrite into table same_type1_b_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@same_type1_b_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/same_type1_b.txt' overwrite into table same_type1_b_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@same_type1_b_txt +PREHOOK: query: insert into table part_change_same_type_different_params partition(part=1) select * from same_type1_b_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@same_type1_b_txt +PREHOOK: Output: default@part_change_same_type_different_params@part=1 +POSTHOOK: query: insert into table part_change_same_type_different_params partition(part=1) select * from same_type1_b_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@same_type1_b_txt +POSTHOOK: Output: default@part_change_same_type_different_params@part=1 +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).b SIMPLE [(same_type1_b_txt)same_type1_b_txt.FieldSchema(name:b, type:string, comment:null), ] 
+POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c1 SIMPLE [(same_type1_b_txt)same_type1_b_txt.FieldSchema(name:c1, type:char(8), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c2 SIMPLE [(same_type1_b_txt)same_type1_b_txt.FieldSchema(name:c2, type:char(32), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c3 SIMPLE [(same_type1_b_txt)same_type1_b_txt.FieldSchema(name:c3, type:varchar(15), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c4 SIMPLE [(same_type1_b_txt)same_type1_b_txt.FieldSchema(name:c4, type:varchar(18), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c5 SIMPLE [(same_type1_b_txt)same_type1_b_txt.FieldSchema(name:c5, type:decimal(10,2), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).c6 SIMPLE [(same_type1_b_txt)same_type1_b_txt.FieldSchema(name:c6, type:decimal(25,15), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=1).insert_num SIMPLE [(same_type1_b_txt)same_type1_b_txt.FieldSchema(name:insert_num, type:int, comment:null), ] +same_type1_b_txt.insert_num same_type1_b_txt.c1 same_type1_b_txt.c2 same_type1_b_txt.c3 same_type1_b_txt.c4 same_type1_b_txt.c5 same_type1_b_txt.c6 same_type1_b_txt.b +PREHOOK: query: CREATE TABLE same_type1_c_txt(insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING) +row format delimited fields terminated by '|' +stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@same_type1_c_txt +POSTHOOK: query: CREATE TABLE same_type1_c_txt(insert_num int, c1 CHAR(8), c2 CHAR(32), c3 VARCHAR(15), c4 VARCHAR(18), c5 DECIMAL(10,2), c6 DECIMAL(25,15), b STRING) +row format delimited fields terminated by '|' +stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@same_type1_c_txt +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/same_type1_c.txt' overwrite into table same_type1_c_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@same_type1_c_txt +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/same_type1_c.txt' overwrite into table same_type1_c_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@same_type1_c_txt +PREHOOK: query: insert into table part_change_same_type_different_params partition(part=2) select * from same_type1_c_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@same_type1_c_txt +PREHOOK: Output: default@part_change_same_type_different_params@part=2 +POSTHOOK: query: insert into table part_change_same_type_different_params partition(part=2) select * from same_type1_c_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@same_type1_c_txt +POSTHOOK: Output: default@part_change_same_type_different_params@part=2 +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=2).b SIMPLE [(same_type1_c_txt)same_type1_c_txt.FieldSchema(name:b, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=2).c1 SIMPLE [(same_type1_c_txt)same_type1_c_txt.FieldSchema(name:c1, type:char(8), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=2).c2 SIMPLE 
[(same_type1_c_txt)same_type1_c_txt.FieldSchema(name:c2, type:char(32), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=2).c3 SIMPLE [(same_type1_c_txt)same_type1_c_txt.FieldSchema(name:c3, type:varchar(15), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=2).c4 SIMPLE [(same_type1_c_txt)same_type1_c_txt.FieldSchema(name:c4, type:varchar(18), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=2).c5 SIMPLE [(same_type1_c_txt)same_type1_c_txt.FieldSchema(name:c5, type:decimal(10,2), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=2).c6 SIMPLE [(same_type1_c_txt)same_type1_c_txt.FieldSchema(name:c6, type:decimal(25,15), comment:null), ] +POSTHOOK: Lineage: part_change_same_type_different_params PARTITION(part=2).insert_num SIMPLE [(same_type1_c_txt)same_type1_c_txt.FieldSchema(name:insert_num, type:int, comment:null), ] +same_type1_c_txt.insert_num same_type1_c_txt.c1 same_type1_c_txt.c2 same_type1_c_txt.c3 same_type1_c_txt.c4 same_type1_c_txt.c5 same_type1_c_txt.c6 same_type1_c_txt.b +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_same_type_different_params + Statistics: Num rows: 13 Data size: 9347 Basic stats: COMPLETE Column stats: PARTIAL + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: char(8)), c2 (type: char(32)), c3 (type: varchar(15)), c4 (type: varchar(18)), c5 (type: decimal(10,2)), c6 (type: decimal(25,15)), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 + Statistics: Num rows: 13 Data size: 9347 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + Statistics: Num rows: 13 Data size: 9347 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: no inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_same_type_different_params +PREHOOK: Input: default@part_change_same_type_different_params@part=1 +PREHOOK: Input: default@part_change_same_type_different_params@part=2 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,b from part_change_same_type_different_params +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_same_type_different_params +POSTHOOK: Input: default@part_change_same_type_different_params@part=1 +POSTHOOK: Input: default@part_change_same_type_different_params@part=2 +#### A masked pattern was here 
#### +insert_num part c1 c2 c3 c4 c5 c6 b +1 1 excess corporation believable gateway thumb repe 76855494.39 NULL original +10 2 dreamlan raster devastation association boa lubricant century 5830.99 NULL new +11 2 tiger however center propeller agoni shocking misshapen 6946533.80 31608910.030000000000000 new +12 2 bulgin extensive ambivalent rastled billion zeroes 943084.30 3090297719.717120349328271 new +13 2 12345678 12345678901234567890123456789012 123456789012345 123456789012345678 NULL 1234567890.543210987654321 new +2 1 mutation hideout authentic blankness sonic hover know 42093605.00 3182652.406680000000000 original +3 1 hibernat country detox coyotes ball pulse cand direct 1413570.30 8.309580815500000 original +4 1 aquarium encounter existent amuse fuse light head bumper adm 5475414.65 25937.675256300400000 original +5 1 12345678 1234567890123456789012345 123456789012345 1234567890 12345678.12 1234567890.098765432100000 original +6 1 hollow innocent crabs blushing ambition ebony liquor age NULL 3841833197.314137090000000 new +7 1 wig feel social fork drum search bump conclusion 8.31 NULL new +8 1 bubble f drain loyal station racket antique bu 0.69 22.832613278700000 new +9 1 12345678 1234567890123456789012345 123456789012345 1234567890 NULL 1234567890.543210987700000 new +PREHOOK: query: drop table part_change_same_type_different_params +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_same_type_different_params +PREHOOK: Output: default@part_change_same_type_different_params +POSTHOOK: query: drop table part_change_same_type_different_params +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_same_type_different_params +POSTHOOK: Output: default@part_change_same_type_different_params diff --git ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_llap_io.q.out ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_llap_io.q.out new file mode 100644 index 0000000..aa349ee --- /dev/null +++ ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_llap_io.q.out @@ -0,0 +1,1474 @@ +PREHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@schema_evolution_data +POSTHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@schema_evolution_data +PREHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data 
+PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@schema_evolution_data +POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@schema_evolution_data +PREHOOK: query: CREATE TABLE part_add_int_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_add_int_permute_select +POSTHOOK: query: CREATE TABLE part_add_int_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_add_int_permute_select +PREHOOK: query: insert into table part_add_int_permute_select partition(part=1) VALUES (1, 1111, 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_add_int_permute_select@part=1 +POSTHOOK: query: insert into table part_add_int_permute_select partition(part=1) VALUES (1, 1111, 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_add_int_permute_select@part=1 +POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).a SCRIPT [] +POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).insert_num SCRIPT [] +col1 col2 col3 +PREHOOK: query: alter table part_add_int_permute_select add columns(c int) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@part_add_int_permute_select +PREHOOK: Output: default@part_add_int_permute_select +POSTHOOK: query: alter table part_add_int_permute_select add columns(c int) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@part_add_int_permute_select +POSTHOOK: Output: default@part_add_int_permute_select +PREHOOK: query: insert into table part_add_int_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333) +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_add_int_permute_select@part=1 +POSTHOOK: query: insert into table part_add_int_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333) +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_add_int_permute_select@part=1 +POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).a SCRIPT [] +POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).c SCRIPT [] +POSTHOOK: Lineage: part_add_int_permute_select PARTITION(part=1).insert_num SCRIPT [] +col1 col2 col3 col4 +PREHOOK: query: explain vectorization detail +select insert_num,part,a,b from part_add_int_permute_select +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,a,b from part_add_int_permute_select +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_add_int_permute_select + Statistics: Num rows: 2 Data size: 198 Basic stats: COMPLETE Column stats: COMPLETE + Select 
Operator + expressions: insert_num (type: int), part (type: int), a (type: int), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 2 Data size: 198 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 198 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: no inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,a,b from part_add_int_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_permute_select +PREHOOK: Input: default@part_add_int_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b from part_add_int_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_permute_select +POSTHOOK: Input: default@part_add_int_permute_select@part=1 +#### A masked pattern was here #### +insert_num part a b +1 1 1111 new +2 1 2222 new +PREHOOK: query: select insert_num,part,a,b,c from part_add_int_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_permute_select +PREHOOK: Input: default@part_add_int_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b,c from part_add_int_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_permute_select +POSTHOOK: Input: default@part_add_int_permute_select@part=1 +#### A masked pattern was here #### +insert_num part a b c +1 1 1111 new NULL +2 1 2222 new NULL +PREHOOK: query: select insert_num,part,c from part_add_int_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_permute_select +PREHOOK: Input: default@part_add_int_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c from part_add_int_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_permute_select +POSTHOOK: Input: default@part_add_int_permute_select@part=1 +#### A masked pattern was here #### +insert_num part c +1 1 NULL +2 1 NULL +PREHOOK: query: drop table part_add_int_permute_select +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_add_int_permute_select +PREHOOK: Output: default@part_add_int_permute_select +POSTHOOK: query: drop table part_add_int_permute_select +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_add_int_permute_select +POSTHOOK: Output: default@part_add_int_permute_select +PREHOOK: query: CREATE TABLE part_add_int_string_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_add_int_string_permute_select +POSTHOOK: query: CREATE TABLE part_add_int_string_permute_select(insert_num int, a INT, b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_add_int_string_permute_select +PREHOOK: query: insert into table part_add_int_string_permute_select partition(part=1) VALUES (1, 1111, 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_add_int_string_permute_select@part=1 +POSTHOOK: query: insert into table part_add_int_string_permute_select 
partition(part=1) VALUES (1, 1111, 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_add_int_string_permute_select@part=1 +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).a SCRIPT [] +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).insert_num SCRIPT [] +col1 col2 col3 +PREHOOK: query: alter table part_add_int_string_permute_select add columns(c int, d string) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Output: default@part_add_int_string_permute_select +POSTHOOK: query: alter table part_add_int_string_permute_select add columns(c int, d string) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Output: default@part_add_int_string_permute_select +PREHOOK: query: insert into table part_add_int_string_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333, '4444') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_add_int_string_permute_select@part=1 +POSTHOOK: query: insert into table part_add_int_string_permute_select partition(part=1) VALUES (2, 2222, 'new', 3333, '4444') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_add_int_string_permute_select@part=1 +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).a SCRIPT [] +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).c SCRIPT [] +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).d SCRIPT [] +POSTHOOK: Lineage: part_add_int_string_permute_select PARTITION(part=1).insert_num SCRIPT [] +col1 col2 col3 col4 col5 +PREHOOK: query: explain vectorization detail +select insert_num,part,a,b from part_add_int_string_permute_select +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,a,b from part_add_int_string_permute_select +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_add_int_string_permute_select + Statistics: Num rows: 2 Data size: 198 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: insert_num (type: int), part (type: int), a (type: int), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 2 Data size: 198 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 198 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: no inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,a,b from part_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: 
default@part_add_int_string_permute_select +PREHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b from part_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +insert_num part a b +1 1 1111 new +2 1 2222 new +PREHOOK: query: select insert_num,part,a,b,c from part_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b,c from part_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +insert_num part a b c +1 1 1111 new NULL +2 1 2222 new NULL +PREHOOK: query: select insert_num,part,a,b,c,d from part_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,b,c,d from part_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +insert_num part a b c d +1 1 1111 new NULL NULL +2 1 2222 new NULL NULL +PREHOOK: query: select insert_num,part,a,c,d from part_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,c,d from part_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +insert_num part a c d +1 1 1111 NULL NULL +2 1 2222 NULL NULL +PREHOOK: query: select insert_num,part,a,d from part_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,a,d from part_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +insert_num part a d +1 1 1111 NULL +2 1 2222 NULL +PREHOOK: query: select insert_num,part,c from part_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c from part_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +insert_num part c +1 1 NULL +2 1 NULL +PREHOOK: query: select insert_num,part,d from 
part_add_int_string_permute_select +PREHOOK: type: QUERY +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,d from part_add_int_string_permute_select +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Input: default@part_add_int_string_permute_select@part=1 +#### A masked pattern was here #### +insert_num part d +1 1 NULL +2 1 NULL +PREHOOK: query: drop table part_add_int_string_permute_select +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_add_int_string_permute_select +PREHOOK: Output: default@part_add_int_string_permute_select +POSTHOOK: query: drop table part_add_int_string_permute_select +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_add_int_string_permute_select +POSTHOOK: Output: default@part_add_int_string_permute_select +PREHOOK: query: CREATE TABLE part_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_string_group_double +POSTHOOK: query: CREATE TABLE part_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_string_group_double +PREHOOK: query: insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_string_group_double@part=1 +POSTHOOK: query: insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_string_group_double@part=1 +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).c2 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).c3 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +_col0 _col1 _col2 _col3 _col4 +PREHOOK: query: alter table part_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_string_group_double +PREHOOK: Output: default@part_change_string_group_double +POSTHOOK: query: alter table part_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_string_group_double +POSTHOOK: Output: 
default@part_change_string_group_double +PREHOOK: query: insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double1, double1, double1, 'new' FROM schema_evolution_data WHERE insert_num = 111 +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_string_group_double@part=1 +POSTHOOK: query: insert into table part_change_string_group_double partition(part=1) SELECT insert_num, double1, double1, double1, 'new' FROM schema_evolution_data WHERE insert_num = 111 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_string_group_double@part=1 +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_double PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num double1 double1 double1 _c4 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,b from part_change_string_group_double +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,b from part_change_string_group_double +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_string_group_double + Statistics: Num rows: 5 Data size: 500 Basic stats: COMPLETE Column stats: PARTIAL + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: double), c2 (type: double), c3 (type: double), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + Statistics: Num rows: 5 Data size: 500 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + Statistics: Num rows: 5 Data size: 500 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: no inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_string_group_double +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_string_group_double +PREHOOK: Input: default@part_change_string_group_double@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_string_group_double +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_string_group_double +POSTHOOK: Input: default@part_change_string_group_double@part=1 +#### A 
masked pattern was here #### +insert_num part c1 c2 c3 b +101 1 1.7976931348623157E308 1.7976931348623157E308 1.7976931348623157E308 original +102 1 -1.7976931348623157E308 -1.7976931348623157E308 -1.7976931348623157E308 original +103 1 NULL NULL NULL original +104 1 30.774 30.774 30.774 original +105 1 46114.28 46114.28 46114.28 original +PREHOOK: query: drop table part_change_string_group_double +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_string_group_double +PREHOOK: Output: default@part_change_string_group_double +POSTHOOK: query: drop table part_change_string_group_double +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_string_group_double +POSTHOOK: Output: default@part_change_string_group_double +PREHOOK: query: CREATE TABLE part_change_date_group_string_group_date_timestamp(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_date_group_string_group_date_timestamp +POSTHOOK: query: CREATE TABLE part_change_date_group_string_group_date_timestamp(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_date_group_string_group_date_timestamp +PREHOOK: query: insert into table part_change_date_group_string_group_date_timestamp partition(part=1) SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_date_group_string_group_date_timestamp@part=1 +POSTHOOK: query: insert into table part_change_date_group_string_group_date_timestamp partition(part=1) SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_date_group_string_group_date_timestamp@part=1 +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c5 SIMPLE 
[(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num date1 date1 date1 date1 date1 timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 _c11 +PREHOOK: query: alter table part_change_date_group_string_group_date_timestamp replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_date_group_string_group_date_timestamp +PREHOOK: Output: default@part_change_date_group_string_group_date_timestamp +POSTHOOK: query: alter table part_change_date_group_string_group_date_timestamp replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp +POSTHOOK: Output: default@part_change_date_group_string_group_date_timestamp +PREHOOK: query: insert into table part_change_date_group_string_group_date_timestamp partition(part=1) VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_change_date_group_string_group_date_timestamp@part=1 +POSTHOOK: query: insert into table part_change_date_group_string_group_date_timestamp partition(part=1) VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_change_date_group_string_group_date_timestamp@part=1 +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c1 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c10 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c2 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c3 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c4 SCRIPT [] +POSTHOOK: Lineage: 
part_change_date_group_string_group_date_timestamp PARTITION(part=1).c5 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c6 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c7 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c8 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).c9 SCRIPT [] +POSTHOOK: Lineage: part_change_date_group_string_group_date_timestamp PARTITION(part=1).insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_date_group_string_group_date_timestamp + Statistics: Num rows: 6 Data size: 9960 Basic stats: COMPLETE Column stats: PARTIAL + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: string), c2 (type: char(50)), c3 (type: char(15)), c4 (type: varchar(50)), c5 (type: varchar(15)), c6 (type: string), c7 (type: char(50)), c8 (type: char(15)), c9 (type: varchar(50)), c10 (type: varchar(15)), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12 + Statistics: Num rows: 6 Data size: 8952 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 8952 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: no inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_date_group_string_group_date_timestamp +PREHOOK: Input: default@part_change_date_group_string_group_date_timestamp@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_date_group_string_group_date_timestamp +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp +POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b +101 1 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.970117179 6229-06-28 02:5 6229-06-28 02:54:28.970117179 6229-06-28 02:5 original +102 1 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 
03:3 5966-07-09 03:30:50.597 5966-07-09 03:3 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-02 06:34:14 1978-08-02 06:34:14 1978-08-02 06:3 1978-08-02 06:34:14 1978-08-02 06:3 original +105 1 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036854 1991-01-06 16:2 1991-01-06 16:20:39.72036854 1991-01-06 16:2 original +111 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL new +PREHOOK: query: drop table part_change_date_group_string_group_date_timestamp +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_date_group_string_group_date_timestamp +PREHOOK: Output: default@part_change_date_group_string_group_date_timestamp +POSTHOOK: query: drop table part_change_date_group_string_group_date_timestamp +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp +POSTHOOK: Output: default@part_change_date_group_string_group_date_timestamp +PREHOOK: query: CREATE TABLE part_change_numeric_group_string_group_multi_ints_string_group(insert_num int, + c1 tinyint, c2 smallint, c3 int, c4 bigint, + c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint, + c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint, + b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: query: CREATE TABLE part_change_numeric_group_string_group_multi_ints_string_group(insert_num int, + c1 tinyint, c2 smallint, c3 int, c4 bigint, + c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint, + c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint, + b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: query: insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) SELECT insert_num, + tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +POSTHOOK: query: insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) SELECT insert_num, + tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: 
part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c16 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c17 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c18 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c19 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c20 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c7 SIMPLE 
[(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 _c21 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 b +101 1 -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL original +102 1 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 203332 888888857923222 original +105 1 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 original +PREHOOK: query: alter table part_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, c4 STRING, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5), + c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5), + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: query: alter table part_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, c4 STRING, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 
CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5), + c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5), + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: query: insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +POSTHOOK: query: insert into table part_change_numeric_group_string_group_multi_ints_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c1 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c10 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c11 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c12 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c13 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c14 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c15 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c16 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c17 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c18 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c19 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c2 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c20 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c3 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c4 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c5 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c6 SCRIPT [] +POSTHOOK: Lineage: 
part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c7 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c8 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).c9 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_multi_ints_string_group PARTITION(part=1).insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 _col17 _col18 _col19 _col20 _col21 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_numeric_group_string_group_multi_ints_string_group + Statistics: Num rows: 6 Data size: 1140 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: string), c2 (type: string), c3 (type: string), c4 (type: string), c5 (type: char(50)), c6 (type: char(50)), c7 (type: char(50)), c8 (type: char(50)), c9 (type: char(5)), c10 (type: char(5)), c11 (type: char(5)), c12 (type: char(5)), c13 (type: varchar(50)), c14 (type: varchar(50)), c15 (type: varchar(50)), c16 (type: varchar(50)), c17 (type: varchar(5)), c18 (type: varchar(5)), c19 (type: varchar(5)), c20 (type: varchar(5)), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22 + Statistics: Num rows: 6 Data size: 1140 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 1140 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: no inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: 
Input: default@part_change_numeric_group_string_group_multi_ints_string_group@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 b +101 1 -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147 NULL -128 NULL -2147483648 NULL -128 NULL -2147 NULL original +102 1 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 21474 92233 127 32767 2147483647 9223372036854775807 127 32767 21474 92233 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 20333 88888 23 834 203332 888888857923222 23 834 20333 88888 original +105 1 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -2830 -9999 -2222 -99 -28300 -999992 -222282153733 -99 -2830 -9999 -2222 original +111 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL new +PREHOOK: query: drop table part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: query: drop table part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: Output: default@part_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: query: CREATE TABLE part_change_numeric_group_string_group_floating_string_group(insert_num int, + c1 decimal(38,18), c2 float, c3 double, + c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double, + c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double, + b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: query: CREATE TABLE part_change_numeric_group_string_group_floating_string_group(insert_num int, + c1 decimal(38,18), c2 float, c3 double, + c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double, + c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double, + b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: query: insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) SELECT insert_num, + decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group@part=1 +POSTHOOK: query: insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) SELECT insert_num, + decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: 
default@part_change_numeric_group_string_group_floating_string_group@part=1 +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).insert_num SIMPLE 
[(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num decimal1 float1 double1 decimal1 float1 double1 decimal1 float1 double1 decimal1 float1 double1 decimal1 float1 double1 _c16 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b +101 1 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 original +102 1 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 original +105 1 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 original +PREHOOK: query: alter table part_change_numeric_group_string_group_floating_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, + c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7), + c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7), + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: query: alter table part_change_numeric_group_string_group_floating_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, + c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7), + c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7), + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: 
query: insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group@part=1 +POSTHOOK: query: insert into table part_change_numeric_group_string_group_floating_string_group partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group@part=1 +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c1 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c10 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c11 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c12 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c13 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c14 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c15 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c2 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c3 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c4 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c5 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c6 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c7 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c8 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).c9 SCRIPT [] +POSTHOOK: Lineage: part_change_numeric_group_string_group_floating_string_group PARTITION(part=1).insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: 
Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_numeric_group_string_group_floating_string_group + Statistics: Num rows: 6 Data size: 3300 Basic stats: COMPLETE Column stats: PARTIAL + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: string), c2 (type: string), c3 (type: string), c4 (type: char(50)), c5 (type: char(50)), c6 (type: char(50)), c7 (type: char(7)), c8 (type: char(7)), c9 (type: char(7)), c10 (type: varchar(50)), c11 (type: varchar(50)), c12 (type: varchar(50)), c13 (type: varchar(7)), c14 (type: varchar(7)), c15 (type: varchar(7)), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17 + Statistics: Num rows: 6 Data size: 3300 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 3300 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: no inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from part_change_numeric_group_string_group_floating_string_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b +101 1 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 original +102 1 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 66475.561431 -100.35978 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 original +105 1 9250340.75 NULL 46114.28 9250340.75 NULL 46114.28 9250340 NULL 46114.2 9250340.75 NULL 46114.28 9250340 NULL 46114.2 original +111 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL new +PREHOOK: query: drop table part_change_numeric_group_string_group_floating_string_group +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: Output: 
default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: query: drop table part_change_numeric_group_string_group_floating_string_group +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_numeric_group_string_group_floating_string_group +POSTHOOK: Output: default@part_change_numeric_group_string_group_floating_string_group +PREHOOK: query: CREATE TABLE part_change_string_group_string_group_string(insert_num int, + c1 string, c2 string, c3 string, c4 string, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), + c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_string_group_string_group_string +POSTHOOK: query: CREATE TABLE part_change_string_group_string_group_string(insert_num int, + c1 string, c2 string, c3 string, c4 string, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), + c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_string_group_string_group_string +PREHOOK: query: insert into table part_change_string_group_string_group_string partition(part=1) SELECT insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_string_group_string_group_string@part=1 +POSTHOOK: query: insert into table part_change_string_group_string_group_string partition(part=1) SELECT insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_string_group_string_group_string@part=1 +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c10 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c5 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c6 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c7 EXPRESSION 
[(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c8 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c9 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,b from part_change_string_group_string_group_string +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_string_group_string_group_string +PREHOOK: Input: default@part_change_string_group_string_group_string@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,b from part_change_string_group_string_group_string +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_string_group_string_group_string +POSTHOOK: Input: default@part_change_string_group_string_group_string@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 b +101 1 freckled freckled freckled freckled original +102 1 ox ox ox ox original +103 1 original +104 1 I cooked I cooked I cooked I cooked original +105 1 200 200 200 200 original +PREHOOK: query: alter table part_change_string_group_string_group_string replace columns (insert_num int, + c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9), + c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING, + c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_string_group_string_group_string +PREHOOK: Output: default@part_change_string_group_string_group_string +POSTHOOK: query: alter table part_change_string_group_string_group_string replace columns (insert_num int, + c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9), + c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING, + c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_string_group_string_group_string +POSTHOOK: Output: default@part_change_string_group_string_group_string +PREHOOK: query: insert into table part_change_string_group_string_group_string partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_change_string_group_string_group_string@part=1 +POSTHOOK: query: insert into table part_change_string_group_string_group_string partition(part=1) VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_change_string_group_string_group_string@part=1 +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c1 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c10 SCRIPT [] +POSTHOOK: Lineage: 
part_change_string_group_string_group_string PARTITION(part=1).c2 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c3 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c4 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c5 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c6 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c7 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c8 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).c9 SCRIPT [] +POSTHOOK: Lineage: part_change_string_group_string_group_string PARTITION(part=1).insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_string_group_string_group_string + Statistics: Num rows: 6 Data size: 2712 Basic stats: COMPLETE Column stats: PARTIAL + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: char(50)), c2 (type: char(9)), c3 (type: varchar(50)), c4 (type: char(9)), c5 (type: varchar(50)), c6 (type: varchar(9)), c7 (type: string), c8 (type: char(50)), c9 (type: char(9)), c10 (type: string), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12 + Statistics: Num rows: 6 Data size: 2712 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 2712 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: no inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_string_group_string_group_string +PREHOOK: Input: default@part_change_string_group_string_group_string@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from part_change_string_group_string_group_string +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_string_group_string_group_string +POSTHOOK: Input: default@part_change_string_group_string_group_string@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b +101 1 freckled freckled freckled freckled freckled freckled freckled freckled freckled freckled original +102 1 ox ox ox ox 
ox ox ox ox ox ox original +103 1 original +104 1 I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked original +105 1 200 200 200 200 200 200 200 200 200 200 original +111 1 filler filler filler filler filler filler filler filler filler filler new +PREHOOK: query: drop table part_change_string_group_string_group_string +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_string_group_string_group_string +PREHOOK: Output: default@part_change_string_group_string_group_string +POSTHOOK: query: drop table part_change_string_group_string_group_string +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_string_group_string_group_string +POSTHOOK: Output: default@part_change_string_group_string_group_string +PREHOOK: query: CREATE TABLE part_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int, + c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, + c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint, + c12 int, c13 int, c14 int, c15 int, + c16 bigint, c17 bigint, c18 bigint, + b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: CREATE TABLE part_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int, + c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, + c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint, + c12 int, c13 int, c14 int, c15 int, + c16 bigint, c17 bigint, c18 bigint, + b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: query: insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +POSTHOOK: query: insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) SELECT insert_num, + tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, + smallint1, smallint1, smallint1, smallint1, smallint1, + int1, int1, int1, int1, + bigint1, bigint1, bigint1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, 
comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c16 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c17 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c18 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 smallint1 smallint1 smallint1 smallint1 smallint1 int1 int1 int1 int1 bigint1 bigint1 bigint1 _c19 
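The statements that follow widen every column of part_change_lower_to_higher_numeric_group_tinyint_to_bigint with ALTER TABLE ... REPLACE COLUMNS and then re-read the existing text partition through the wider types. A minimal sketch of the same pattern, using a hypothetical widen_demo table and column names that are not part of this patch:

  CREATE TABLE widen_demo (insert_num INT, c1 TINYINT, b STRING) PARTITIONED BY (part INT);
  INSERT INTO TABLE widen_demo PARTITION (part=1) VALUES (1, 127, 'original');
  -- Widen c1 in place; no existing data files are rewritten.
  ALTER TABLE widen_demo REPLACE COLUMNS (insert_num INT, c1 BIGINT, b STRING);
  -- The pre-existing row now reads back through the wider type: 127 as BIGINT.
  SELECT insert_num, part, c1, b FROM widen_demo;

Lower-to-higher conversions inside the numeric group (tinyint to smallint/int/bigint, and integers to decimal/float/double) are lossless for in-range values, which is why rows 101 through 105 in the result set further below keep their original magnitudes under the new schema, modulo the decimal and floating-point display formats.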
+PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 b +101 1 -128 -128 -128 -128 -128 -128 NULL NULL NULL NULL NULL -2147483648 -2147483648 -2147483648 -2147483648 NULL NULL NULL original +102 1 127 127 127 127 127 127 32767 32767 32767 32767 32767 2147483647 2147483647 2147483647 2147483647 9223372036854775807 9223372036854775807 9223372036854775807 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 23 23 23 23 23 23 834 834 834 834 834 203332 203332 203332 203332 888888857923222 888888857923222 888888857923222 original +105 1 -99 -99 -99 -99 -99 -99 -28300 -28300 -28300 -28300 -28300 -999992 -999992 -999992 -999992 -222282153733 -222282153733 -222282153733 original +PREHOOK: query: alter table part_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: alter table part_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int, + c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE, + c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE, + c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE, + c16 decimal(38,18), c17 FLOAT, c18 DOUBLE, + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: query: insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +POSTHOOK: query: insert into table part_change_lower_to_higher_numeric_group_tinyint_to_bigint partition(part=1) VALUES (111, + 7000, 80000, 90000000, 1234.5678, 9876.543, 789.321, + 80000, 90000000, 1234.5678, 9876.543, 789.321, + 90000000, 1234.5678, 9876.543, 789.321, + 1234.5678, 9876.543, 789.321, + 'new') 
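The VALUES row above (insert_num 111) is written after the REPLACE COLUMNS, so its fields are produced against the widened schema while landing in the pre-existing part=1 text partition; the select that follows shows which of those fields survive the round trip and which come back NULL. The widening itself can be checked in isolation with explicit casts — a hedged, standalone illustration, not part of the patch:

  -- In-range values widen losslessly across the numeric group:
  SELECT CAST(127 AS TINYINT),
         CAST(CAST(127 AS TINYINT) AS SMALLINT),
         CAST(CAST(127 AS TINYINT) AS BIGINT),
         CAST(CAST(127 AS TINYINT) AS DECIMAL(38,18)),
         CAST(CAST(127 AS TINYINT) AS DOUBLE);
  -- Values outside the narrow type's range (e.g. 7000 for TINYINT, whose
  -- range is -128..127) were never representable before the widening, which
  -- is consistent with the NULLs reported for row 111 in the result set below.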
+POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c1 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c10 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c11 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c12 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c13 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c14 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c15 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c16 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c17 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c18 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c2 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c3 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c4 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c5 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c6 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c7 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c8 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).c9 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_tinyint_to_bigint PARTITION(part=1).insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 _col17 _col18 _col19 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_lower_to_higher_numeric_group_tinyint_to_bigint + Statistics: Num rows: 6 Data size: 1060 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: 
smallint), c2 (type: int), c3 (type: bigint), c4 (type: decimal(38,18)), c5 (type: float), c6 (type: double), c7 (type: int), c8 (type: bigint), c9 (type: decimal(38,18)), c10 (type: float), c11 (type: double), c12 (type: bigint), c13 (type: decimal(38,18)), c14 (type: float), c15 (type: double), c16 (type: decimal(38,18)), c17 (type: float), c18 (type: double), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20 + Statistics: Num rows: 6 Data size: 1060 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 1060 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: no inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 b +101 1 -128 -128 -128 -128.000000000000000000 -128.0 -128.0 NULL NULL NULL NULL NULL -2147483648 -2147483648.000000000000000000 -2.14748365E9 -2.147483648E9 NULL NULL NULL original +102 1 127 127 127 127.000000000000000000 127.0 127.0 32767 32767 32767.000000000000000000 32767.0 32767.0 2147483647 2147483647.000000000000000000 2.14748365E9 2.147483647E9 9223372036854775807.000000000000000000 9.223372E18 9.223372036854776E18 original +103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 1 23 23 23 23.000000000000000000 23.0 23.0 834 834 834.000000000000000000 834.0 834.0 203332 203332.000000000000000000 203332.0 203332.0 888888857923222.000000000000000000 8.8888885E14 8.88888857923222E14 original +105 1 -99 -99 -99 -99.000000000000000000 -99.0 -99.0 -28300 -28300 -28300.000000000000000000 -28300.0 -28300.0 -999992 -999992.000000000000000000 -999992.0 -999992.0 -222282153733.000000000000000000 -2.22282154E11 -2.22282153733E11 original +111 1 NULL NULL NULL NULL NULL NULL NULL NULL 1234.000000000000000000 9876.0 789.0 90000000 1234.000000000000000000 9876.0 789.0 1234.000000000000000000 9876.0 789.0 new +PREHOOK: query: drop table part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: query: drop table 
part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_tinyint_to_bigint +PREHOOK: query: CREATE TABLE part_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING) PARTITIONED BY(part INT) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: CREATE TABLE part_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int, + c1 decimal(38,18), c2 decimal(38,18), + c3 float, + b STRING) PARTITIONED BY(part INT) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: query: insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) SELECT insert_num, + decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +POSTHOOK: query: insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) SELECT insert_num, + decimal1, decimal1, + float1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).b SIMPLE [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num decimal1 decimal1 float1 _c4 +PREHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 b +101 1 99999999999999999999.999999999999999999 99999999999999999999.999999999999999999 Infinity original +102 1 
-99999999999999999999.999999999999999999 -99999999999999999999.999999999999999999 -Infinity original +103 1 NULL NULL NULL original +104 1 66475.561431000000000000 66475.561431000000000000 -100.35978 original +105 1 9250340.750000000000000000 9250340.750000000000000000 NULL original +PREHOOK: query: alter table part_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: alter table part_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: query: insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +POSTHOOK: query: insert into table part_change_lower_to_higher_numeric_group_decimal_to_float partition(part=1) VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).b SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c1 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c2 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).c3 SCRIPT [] +POSTHOOK: Lineage: part_change_lower_to_higher_numeric_group_decimal_to_float PARTITION(part=1).insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 +PREHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: part_change_lower_to_higher_numeric_group_decimal_to_float + Statistics: Num rows: 6 Data size: 648 Basic stats: COMPLETE Column stats: PARTIAL + Select Operator + expressions: insert_num (type: int), part (type: int), c1 (type: float), c2 (type: double), c3 (type: double), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + Statistics: Num rows: 6 Data size: 648 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 648 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: no inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: QUERY +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,part,c1,c2,c3,b from part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: QUERY +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float@part=1 +#### A masked pattern was here #### +insert_num part c1 c2 c3 b +101 1 1.0E20 1.0E20 Infinity original +102 1 -1.0E20 -1.0E20 -Infinity original +103 1 NULL NULL NULL original +104 1 66475.56 66475.561431 -100.35977935791016 original +105 1 9250341.0 9250340.75 NULL original +111 1 1234.5677 9876.543 1234.5677490234375 new +PREHOOK: query: drop table part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +PREHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: query: drop table part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part_change_lower_to_higher_numeric_group_decimal_to_float +POSTHOOK: Output: default@part_change_lower_to_higher_numeric_group_decimal_to_float diff --git ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_table_llap_io.q.out ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_table_llap_io.q.out new file mode 100644 index 0000000..f64de02 --- /dev/null +++ ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_table_llap_io.q.out @@ -0,0 +1,1291 @@ +PREHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@schema_evolution_data +POSTHOOK: query: CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), float1 float, double1 double, string1 string, string2 string, date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str string, int_str string, bigint_str string, decimal_str string, float_str string, double_str string, date_str string, timestamp_str string, filler string) +row format delimited fields terminated by '|' stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@schema_evolution_data +PREHOOK: query: load data 
local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@schema_evolution_data
+POSTHOOK: query: load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@schema_evolution_data
+PREHOOK: query: CREATE TABLE table_add_int_permute_select(insert_num int, a INT, b STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table_add_int_permute_select
+POSTHOOK: query: CREATE TABLE table_add_int_permute_select(insert_num int, a INT, b STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table_add_int_permute_select
+PREHOOK: query: insert into table table_add_int_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data
+PREHOOK: type: QUERY
+PREHOOK: Input: default@schema_evolution_data
+PREHOOK: Output: default@table_add_int_permute_select
+POSTHOOK: query: insert into table table_add_int_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@schema_evolution_data
+POSTHOOK: Output: default@table_add_int_permute_select
+POSTHOOK: Lineage: table_add_int_permute_select.a SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: table_add_int_permute_select.b SIMPLE []
+POSTHOOK: Lineage: table_add_int_permute_select.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ]
+insert_num int1 _c2
+PREHOOK: query: alter table table_add_int_permute_select add columns(c int)
+PREHOOK: type: ALTERTABLE_ADDCOLS
+PREHOOK: Input: default@table_add_int_permute_select
+PREHOOK: Output: default@table_add_int_permute_select
+POSTHOOK: query: alter table table_add_int_permute_select add columns(c int)
+POSTHOOK: type: ALTERTABLE_ADDCOLS
+POSTHOOK: Input: default@table_add_int_permute_select
+POSTHOOK: Output: default@table_add_int_permute_select
+PREHOOK: query: insert into table table_add_int_permute_select VALUES (111, 80000, 'new', 80000)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@table_add_int_permute_select
+POSTHOOK: query: insert into table table_add_int_permute_select VALUES (111, 80000, 'new', 80000)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@table_add_int_permute_select
+POSTHOOK: Lineage: table_add_int_permute_select.a SCRIPT []
+POSTHOOK: Lineage: table_add_int_permute_select.b SCRIPT []
+POSTHOOK: Lineage: table_add_int_permute_select.c SCRIPT []
+POSTHOOK: Lineage: table_add_int_permute_select.insert_num SCRIPT []
+col1 col2 col3 col4
+PREHOOK: query: explain vectorization detail
+select insert_num,a,b from table_add_int_permute_select
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization detail
+select insert_num,a,b from table_add_int_permute_select
+POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+  enabled: false
+  enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: table_add_int_permute_select
+                  Statistics: Num rows: 6 Data size: 600 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: insert_num (type: int), a (type: int), b (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 600 Basic stats: COMPLETE Column stats: COMPLETE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 6 Data size: 600 Basic stats: COMPLETE Column stats: COMPLETE
+                      table:
+                          input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select insert_num,a,b from table_add_int_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_add_int_permute_select
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,a,b from table_add_int_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_add_int_permute_select
+#### A masked pattern was here ####
+insert_num a b
+101 -2147483648 original
+102 2147483647 original
+103 NULL original
+104 203332 original
+105 -999992 original
+111 80000 new
+PREHOOK: query: select insert_num,a,b,c from table_add_int_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_add_int_permute_select
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,a,b,c from table_add_int_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_add_int_permute_select
+#### A masked pattern was here ####
+insert_num a b c
+101 -2147483648 original NULL
+102 2147483647 original NULL
+103 NULL original NULL
+104 203332 original NULL
+105 -999992 original NULL
+111 80000 new 80000
+PREHOOK: query: select insert_num,c from table_add_int_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_add_int_permute_select
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,c from table_add_int_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_add_int_permute_select
+#### A masked pattern was here ####
+insert_num c
+101 NULL
+102 NULL
+103 NULL
+104 NULL
+105 NULL
+111 80000
+PREHOOK: query: drop table table_add_int_permute_select
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@table_add_int_permute_select
+PREHOOK: Output: default@table_add_int_permute_select
+POSTHOOK: query: drop table table_add_int_permute_select
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@table_add_int_permute_select
+POSTHOOK: Output: default@table_add_int_permute_select
+PREHOOK: query: CREATE TABLE table_add_int_string_permute_select(insert_num int, a INT, b STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table_add_int_string_permute_select
+POSTHOOK: query: CREATE TABLE table_add_int_string_permute_select(insert_num int, a INT, b STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table_add_int_string_permute_select
+PREHOOK: query: insert into table table_add_int_string_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data
+PREHOOK: type: QUERY
+PREHOOK: Input: default@schema_evolution_data
+PREHOOK: Output: default@table_add_int_string_permute_select
+POSTHOOK: query: insert into table table_add_int_string_permute_select SELECT insert_num, int1, 'original' FROM schema_evolution_data
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@schema_evolution_data
+POSTHOOK: Output: default@table_add_int_string_permute_select
+POSTHOOK: Lineage: table_add_int_string_permute_select.a SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: table_add_int_string_permute_select.b SIMPLE []
+POSTHOOK: Lineage: table_add_int_string_permute_select.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ]
+insert_num int1 _c2
+PREHOOK: query: alter table table_add_int_string_permute_select add columns(c int, d string)
+PREHOOK: type: ALTERTABLE_ADDCOLS
+PREHOOK: Input: default@table_add_int_string_permute_select
+PREHOOK: Output: default@table_add_int_string_permute_select
+POSTHOOK: query: alter table table_add_int_string_permute_select add columns(c int, d string)
+POSTHOOK: type: ALTERTABLE_ADDCOLS
+POSTHOOK: Input: default@table_add_int_string_permute_select
+POSTHOOK: Output: default@table_add_int_string_permute_select
+PREHOOK: query: insert into table table_add_int_string_permute_select VALUES (111, 80000, 'new', 80000, 'filler')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@table_add_int_string_permute_select
+POSTHOOK: query: insert into table table_add_int_string_permute_select VALUES (111, 80000, 'new', 80000, 'filler')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@table_add_int_string_permute_select
+POSTHOOK: Lineage: table_add_int_string_permute_select.a SCRIPT []
+POSTHOOK: Lineage: table_add_int_string_permute_select.b SCRIPT []
+POSTHOOK: Lineage: table_add_int_string_permute_select.c SCRIPT []
+POSTHOOK: Lineage: table_add_int_string_permute_select.d SCRIPT []
+POSTHOOK: Lineage: table_add_int_string_permute_select.insert_num SCRIPT []
+col1 col2 col3 col4 col5
+PREHOOK: query: explain vectorization detail
+select insert_num,a,b from table_add_int_string_permute_select
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization detail
+select insert_num,a,b from table_add_int_string_permute_select
+POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+  enabled: false
+  enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: table_add_int_string_permute_select
+                  Statistics: Num rows: 6 Data size: 600 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: insert_num (type: int), a (type: int), b (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 600 Basic stats: COMPLETE Column stats: COMPLETE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 6 Data size: 600 Basic stats: COMPLETE Column stats: COMPLETE
+                      table:
+                          input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select insert_num,a,b from table_add_int_string_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,a,b from table_add_int_string_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+insert_num a b
+101 -2147483648 original
+102 2147483647 original
+103 NULL original
+104 203332 original
+105 -999992 original
+111 80000 new
+PREHOOK: query: select insert_num,a,b,c from table_add_int_string_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,a,b,c from table_add_int_string_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+insert_num a b c
+101 -2147483648 original NULL
+102 2147483647 original NULL
+103 NULL original NULL
+104 203332 original NULL
+105 -999992 original NULL
+111 80000 new 80000
+PREHOOK: query: select insert_num,a,b,c,d from table_add_int_string_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,a,b,c,d from table_add_int_string_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+insert_num a b c d
+101 -2147483648 original NULL NULL
+102 2147483647 original NULL NULL
+103 NULL original NULL NULL
+104 203332 original NULL NULL
+105 -999992 original NULL NULL
+111 80000 new 80000 filler
+PREHOOK: query: select insert_num,a,c,d from table_add_int_string_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,a,c,d from table_add_int_string_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+insert_num a c d
+101 -2147483648 NULL NULL
+102 2147483647 NULL NULL
+103 NULL NULL NULL
+104 203332 NULL NULL
+105 -999992 NULL NULL
+111 80000 80000 filler
+PREHOOK: query: select insert_num,a,d from table_add_int_string_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,a,d from table_add_int_string_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+insert_num a d
+101 -2147483648 NULL
+102 2147483647 NULL
+103 NULL NULL
+104 203332 NULL
+105 -999992 NULL
+111 80000 filler
+PREHOOK: query: select insert_num,c from table_add_int_string_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,c from table_add_int_string_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+insert_num c
+101 NULL
+102 NULL
+103 NULL
+104 NULL
+105 NULL
+111 80000
+PREHOOK: query: select insert_num,d from table_add_int_string_permute_select
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,d from table_add_int_string_permute_select
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_add_int_string_permute_select
+#### A masked pattern was here ####
+insert_num d
+101 NULL +102 NULL +103 NULL +104 NULL +105 NULL +111 filler +PREHOOK: query: drop table table_add_int_string_permute_select +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table_add_int_string_permute_select +PREHOOK: Output: default@table_add_int_string_permute_select +POSTHOOK: query: drop table table_add_int_string_permute_select +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table_add_int_string_permute_select +POSTHOOK: Output: default@table_add_int_string_permute_select +PREHOOK: query: CREATE TABLE table_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table_change_string_group_double +POSTHOOK: query: CREATE TABLE table_change_string_group_double(insert_num int, c1 STRING, c2 CHAR(50), c3 VARCHAR(50), b STRING) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table_change_string_group_double +PREHOOK: query: insert into table table_change_string_group_double SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table_change_string_group_double +POSTHOOK: query: insert into table table_change_string_group_double SELECT insert_num, double_str, double_str, double_str, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_change_string_group_double +POSTHOOK: Lineage: table_change_string_group_double.b SIMPLE [] +POSTHOOK: Lineage: table_change_string_group_double.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_double.c2 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_double.c3 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double_str, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_double.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +_col0 _col1 _col2 _col3 _col4 +PREHOOK: query: alter table table_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@table_change_string_group_double +PREHOOK: Output: default@table_change_string_group_double +POSTHOOK: query: alter table table_change_string_group_double replace columns (insert_num int, c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@table_change_string_group_double +POSTHOOK: Output: default@table_change_string_group_double +PREHOOK: query: insert into table table_change_string_group_double VALUES (111, 789.321, 789.321, 789.321, 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@table_change_string_group_double +POSTHOOK: query: insert into table table_change_string_group_double VALUES (111, 789.321, 789.321, 789.321, 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@table_change_string_group_double +POSTHOOK: Lineage: table_change_string_group_double.b SCRIPT [] +POSTHOOK: Lineage: 
table_change_string_group_double.c1 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_double.c2 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_double.c3 SCRIPT [] +POSTHOOK: Lineage: table_change_string_group_double.insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 +PREHOOK: query: explain vectorization detail +select insert_num,c1,c2,c3,b from table_change_string_group_double +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,c1,c2,c3,b from table_change_string_group_double +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: table_change_string_group_double + Statistics: Num rows: 6 Data size: 720 Basic stats: COMPLETE Column stats: PARTIAL + Select Operator + expressions: insert_num (type: int), c1 (type: double), c2 (type: double), c3 (type: double), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Statistics: Num rows: 6 Data size: 720 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 720 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: no inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_double +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_string_group_double +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_double +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_string_group_double +#### A masked pattern was here #### +insert_num c1 c2 c3 b +101 1.7976931348623157E308 1.7976931348623157E308 1.7976931348623157E308 original +102 -1.7976931348623157E308 -1.7976931348623157E308 -1.7976931348623157E308 original +103 NULL NULL NULL original +104 30.774 30.774 30.774 original +105 46114.28 46114.28 46114.28 original +111 789.321 789.321 789.321 new +PREHOOK: query: drop table table_change_string_group_double +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table_change_string_group_double +PREHOOK: Output: default@table_change_string_group_double +POSTHOOK: query: drop table table_change_string_group_double +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table_change_string_group_double +POSTHOOK: Output: default@table_change_string_group_double +PREHOOK: query: CREATE TABLE table_change_date_group_string_group_date_group(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table_change_date_group_string_group_date_group +POSTHOOK: query: CREATE TABLE table_change_date_group_string_group_date_group(insert_num int, c1 DATE, c2 DATE, c3 DATE, c4 DATE, c5 DATE, c6 TIMESTAMP, c7 TIMESTAMP, c8 TIMESTAMP, c9 TIMESTAMP, c10 TIMESTAMP, b STRING) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: 
Output: default@table_change_date_group_string_group_date_group +PREHOOK: query: insert into table table_change_date_group_string_group_date_group SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table_change_date_group_string_group_date_group +POSTHOOK: query: insert into table table_change_date_group_string_group_date_group SELECT insert_num, date1, date1, date1, date1, date1, timestamp1, timestamp1, timestamp1, timestamp1, timestamp1, 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_change_date_group_string_group_date_group +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.b SIMPLE [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:date1, type:date, comment:null), ] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:timestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num date1 date1 date1 date1 date1 timestamp1 timestamp1 timestamp1 timestamp1 timestamp1 _c11 +PREHOOK: query: alter table table_change_date_group_string_group_date_group replace columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@table_change_date_group_string_group_date_group +PREHOOK: Output: default@table_change_date_group_string_group_date_group +POSTHOOK: query: alter table table_change_date_group_string_group_date_group replace columns(insert_num 
int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 VARCHAR(15), c6 STRING, c7 CHAR(50), c8 CHAR(15), c9 VARCHAR(50), c10 VARCHAR(15), b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@table_change_date_group_string_group_date_group +POSTHOOK: Output: default@table_change_date_group_string_group_date_group +PREHOOK: query: insert into table table_change_date_group_string_group_date_group VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@table_change_date_group_string_group_date_group +POSTHOOK: query: insert into table table_change_date_group_string_group_date_group VALUES (111, 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@table_change_date_group_string_group_date_group +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.b SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c1 SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c10 SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c2 SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c3 SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c4 SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c5 SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c6 SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c7 SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c8 SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.c9 SCRIPT [] +POSTHOOK: Lineage: table_change_date_group_string_group_date_group.insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 +PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_date_group_string_group_date_group +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_date_group_string_group_date_group +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_date_group_string_group_date_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_date_group_string_group_date_group +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b +101 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.970117179 6229-06-28 02:5 6229-06-28 02:54:28.970117179 6229-06-28 02:5 original +102 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:3 5966-07-09 03:30:50.597 5966-07-09 03:3 original +103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-02 06:34:14 1978-08-02 06:34:14 1978-08-02 06:3 1978-08-02 06:34:14 1978-08-02 06:3 original +105 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036854 1991-01-06 16:2 1991-01-06 16:20:39.72036854 1991-01-06 16:2 original +111 filler filler filler filler filler filler filler filler filler filler new +PREHOOK: query: drop table 
table_change_date_group_string_group_date_group +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table_change_date_group_string_group_date_group +PREHOOK: Output: default@table_change_date_group_string_group_date_group +POSTHOOK: query: drop table table_change_date_group_string_group_date_group +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table_change_date_group_string_group_date_group +POSTHOOK: Output: default@table_change_date_group_string_group_date_group +PREHOOK: query: CREATE TABLE table_change_numeric_group_string_group_multi_ints_string_group(insert_num int, + c1 tinyint, c2 smallint, c3 int, c4 bigint, + c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint, + c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint, + b STRING) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: query: CREATE TABLE table_change_numeric_group_string_group_multi_ints_string_group(insert_num int, + c1 tinyint, c2 smallint, c3 int, c4 bigint, + c5 tinyint, c6 smallint, c7 int, c8 bigint, c9 tinyint, c10 smallint, c11 int, c12 bigint, + c13 tinyint, c14 smallint, c15 int, c16 bigint, c17 tinyint, c18 smallint, c19 int, c20 bigint, + b STRING) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: query: insert into table table_change_numeric_group_string_group_multi_ints_string_group SELECT insert_num, + tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: query: insert into table table_change_numeric_group_string_group_multi_ints_string_group SELECT insert_num, + tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + tinyint1, smallint1, int1, bigint1, tinyint1, smallint1, int1, bigint1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.b SIMPLE [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c13 SIMPLE 
[(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c16 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c17 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c18 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c19 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c20 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 tinyint1 smallint1 int1 bigint1 _c21 +PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from 
table_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 b +101 -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL original +102 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 original +103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 203332 888888857923222 original +105 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 original +PREHOOK: query: alter table table_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, c4 STRING, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5), + c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5), + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: query: alter table table_change_numeric_group_string_group_multi_ints_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, c4 STRING, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), c8 CHAR(50), c9 CHAR(5), c10 CHAR(5), c11 CHAR(5), c12 CHAR(5), + c13 VARCHAR(50), c14 VARCHAR(50), c15 VARCHAR(50), c16 VARCHAR(50), c17 VARCHAR(5), c18 VARCHAR(5), c19 VARCHAR(5), c20 VARCHAR(5), + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: query: insert into table table_change_numeric_group_string_group_multi_ints_string_group VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: query: insert into table table_change_numeric_group_string_group_multi_ints_string_group VALUES (111, + 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') 
+POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.b SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c1 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c10 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c11 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c12 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c13 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c14 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c15 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c16 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c17 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c18 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c19 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c2 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c20 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c3 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c4 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c5 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c6 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c7 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c8 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.c9 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_multi_ints_string_group.insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 _col17 _col18 _col19 _col20 _col21 +PREHOOK: query: explain vectorization detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: table_change_numeric_group_string_group_multi_ints_string_group + Statistics: Num rows: 6 Data size: 15696 Basic stats: COMPLETE Column stats: PARTIAL + Select Operator + expressions: insert_num (type: int), c1 (type: string), c2 (type: string), c3 (type: string), c4 (type: string), c5 (type: 
char(50)), c6 (type: char(50)), c7 (type: char(50)), c8 (type: char(50)), c9 (type: char(5)), c10 (type: char(5)), c11 (type: char(5)), c12 (type: char(5)), c13 (type: varchar(50)), c14 (type: varchar(50)), c15 (type: varchar(50)), c16 (type: varchar(50)), c17 (type: varchar(5)), c18 (type: varchar(5)), c19 (type: varchar(5)), c20 (type: varchar(5)), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21 + Statistics: Num rows: 6 Data size: 15696 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 15696 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: no inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 b +101 -128 NULL -2147483648 NULL -128 NULL -2147483648 NULL -128 NULL -2147 NULL -128 NULL -2147483648 NULL -128 NULL -2147 NULL original +102 127 32767 2147483647 9223372036854775807 127 32767 2147483647 9223372036854775807 127 32767 21474 92233 127 32767 2147483647 9223372036854775807 127 32767 21474 92233 original +103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 23 834 203332 888888857923222 23 834 203332 888888857923222 23 834 20333 88888 23 834 203332 888888857923222 23 834 20333 88888 original +105 -99 -28300 -999992 -222282153733 -99 -28300 -999992 -222282153733 -99 -2830 -9999 -2222 -99 -28300 -999992 -222282153733 -99 -2830 -9999 -2222 original +111 filler filler filler filler filler filler filler filler fille fille fille fille filler filler filler filler fille fille fille fille new +PREHOOK: query: drop table table_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: query: drop table table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table_change_numeric_group_string_group_multi_ints_string_group +POSTHOOK: Output: default@table_change_numeric_group_string_group_multi_ints_string_group +PREHOOK: query: CREATE TABLE table_change_numeric_group_string_group_floating_string_group(insert_num int, + c1 decimal(38,18), c2 float, c3 double, + c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double, + c10 
decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double, + b STRING) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +POSTHOOK: query: CREATE TABLE table_change_numeric_group_string_group_floating_string_group(insert_num int, + c1 decimal(38,18), c2 float, c3 double, + c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 float, c9 double, + c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), c14 float, c15 double, + b STRING) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +PREHOOK: query: insert into table table_change_numeric_group_string_group_floating_string_group SELECT insert_num, + decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +POSTHOOK: query: insert into table table_change_numeric_group_string_group_floating_string_group SELECT insert_num, + decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + decimal1, float1, double1, decimal1, float1, double1, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.b SIMPLE [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: 
table_change_numeric_group_string_group_floating_string_group.c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:double1, type:double, comment:null), ] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ] +insert_num decimal1 float1 double1 decimal1 float1 double1 decimal1 float1 double1 decimal1 float1 double1 decimal1 float1 double1 _c16 +PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b +101 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 original +102 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 original +103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 66475.561431000000000000 -100.35978 30.774 original +105 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 original +PREHOOK: query: alter table 
table_change_numeric_group_string_group_floating_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, + c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7), + c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7), + b STRING) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group +PREHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +POSTHOOK: query: alter table table_change_numeric_group_string_group_floating_string_group replace columns (insert_num int, + c1 STRING, c2 STRING, c3 STRING, + c4 CHAR(50), c5 CHAR(50), c6 CHAR(50), c7 CHAR(7), c8 CHAR(7), c9 CHAR(7), + c10 VARCHAR(50), c11 VARCHAR(50), c12 VARCHAR(50), c13 VARCHAR(7), c14 VARCHAR(7), c15 VARCHAR(7), + b STRING) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group +POSTHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +PREHOOK: query: insert into table table_change_numeric_group_string_group_floating_string_group VALUES (111, + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +POSTHOOK: query: insert into table table_change_numeric_group_string_group_floating_string_group VALUES (111, + 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'filler', 'filler', 'filler', 'filler', 'filler', 'filler', + 'new') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.b SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c1 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c10 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c11 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c12 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c13 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c14 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c15 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c2 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c3 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c4 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c5 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c6 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c7 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c8 SCRIPT [] +POSTHOOK: Lineage: table_change_numeric_group_string_group_floating_string_group.c9 SCRIPT [] +POSTHOOK: Lineage: 
table_change_numeric_group_string_group_floating_string_group.insert_num SCRIPT [] +_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 +PREHOOK: query: explain vectorization detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: table_change_numeric_group_string_group_floating_string_group + Statistics: Num rows: 6 Data size: 10884 Basic stats: COMPLETE Column stats: PARTIAL + Select Operator + expressions: insert_num (type: int), c1 (type: string), c2 (type: string), c3 (type: string), c4 (type: char(50)), c5 (type: char(50)), c6 (type: char(50)), c7 (type: char(7)), c8 (type: char(7)), c9 (type: char(7)), c10 (type: varchar(50)), c11 (type: varchar(50)), c12 (type: varchar(50)), c13 (type: varchar(7)), c14 (type: varchar(7)), c15 (type: varchar(7)), b (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16 + Statistics: Num rows: 6 Data size: 10884 Basic stats: COMPLETE Column stats: PARTIAL + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 10884 Basic stats: COMPLETE Column stats: PARTIAL + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: no inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group +PREHOOK: type: QUERY +PREHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group +#### A masked pattern was here #### +POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group +#### A masked pattern was here #### +insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b +101 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 original +102 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 original +103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original +104 66475.561431000000000000 
-100.35978 30.774 66475.561431000000000000 -100.35978 30.774 66475.5 -100.35 30.774 66475.561431000000000000 -100.35978 30.774 66475.5 -100.35 30.774 original +105 9250340.750000000000000000 NULL 46114.28 9250340.750000000000000000 NULL 46114.28 9250340 NULL 46114.2 9250340.750000000000000000 NULL 46114.28 9250340 NULL 46114.2 original +111 filler filler filler filler filler filler filler filler filler filler filler filler filler filler filler new +PREHOOK: query: drop table table_change_numeric_group_string_group_floating_string_group +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group +PREHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +POSTHOOK: query: drop table table_change_numeric_group_string_group_floating_string_group +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@table_change_numeric_group_string_group_floating_string_group +POSTHOOK: Output: default@table_change_numeric_group_string_group_floating_string_group +PREHOOK: query: CREATE TABLE table_change_string_group_string_group_string(insert_num int, + c1 string, c2 string, c3 string, c4 string, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), + c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table_change_string_group_string_group_string +POSTHOOK: query: CREATE TABLE table_change_string_group_string_group_string(insert_num int, + c1 string, c2 string, c3 string, c4 string, + c5 CHAR(50), c6 CHAR(50), c7 CHAR(50), + c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table_change_string_group_string_group_string +PREHOOK: query: insert into table table_change_string_group_string_group_string SELECT insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data +PREHOOK: type: QUERY +PREHOOK: Input: default@schema_evolution_data +PREHOOK: Output: default@table_change_string_group_string_group_string +POSTHOOK: query: insert into table table_change_string_group_string_group_string SELECT insert_num, + string2, string2, string2, string2, + string2, string2, string2, + string2, string2, string2, + 'original' FROM schema_evolution_data +POSTHOOK: type: QUERY +POSTHOOK: Input: default@schema_evolution_data +POSTHOOK: Output: default@table_change_string_group_string_group_string +POSTHOOK: Lineage: table_change_string_group_string_group_string.b SIMPLE [] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c10 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: table_change_string_group_string_group_string.c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ] +POSTHOOK: Lineage: 
+PREHOOK: query: CREATE TABLE table_change_string_group_string_group_string(insert_num int,
+    c1 string, c2 string, c3 string, c4 string,
+    c5 CHAR(50), c6 CHAR(50), c7 CHAR(50),
+    c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table_change_string_group_string_group_string
+POSTHOOK: query: CREATE TABLE table_change_string_group_string_group_string(insert_num int,
+    c1 string, c2 string, c3 string, c4 string,
+    c5 CHAR(50), c6 CHAR(50), c7 CHAR(50),
+    c8 VARCHAR(50), c9 VARCHAR(50), c10 VARCHAR(50), b STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table_change_string_group_string_group_string
+PREHOOK: query: insert into table table_change_string_group_string_group_string SELECT insert_num,
+    string2, string2, string2, string2,
+    string2, string2, string2,
+    string2, string2, string2,
+    'original' FROM schema_evolution_data
+PREHOOK: type: QUERY
+PREHOOK: Input: default@schema_evolution_data
+PREHOOK: Output: default@table_change_string_group_string_group_string
+POSTHOOK: query: insert into table table_change_string_group_string_group_string SELECT insert_num,
+    string2, string2, string2, string2,
+    string2, string2, string2,
+    string2, string2, string2,
+    'original' FROM schema_evolution_data
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@schema_evolution_data
+POSTHOOK: Output: default@table_change_string_group_string_group_string
+POSTHOOK: Lineage: table_change_string_group_string_group_string.b SIMPLE []
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ]
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c10 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ]
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ]
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ]
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ]
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c5 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ]
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c6 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ]
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c7 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ]
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c8 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ]
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c9 EXPRESSION [(schema_evolution_data)schema_evolution_data.FieldSchema(name:string2, type:string, comment:null), ]
+POSTHOOK: Lineage: table_change_string_group_string_group_string.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ]
+_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11
+PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_string_group_string_group_string
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_change_string_group_string_group_string
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_string_group_string_group_string
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_change_string_group_string_group_string
+#### A masked pattern was here ####
+insert_num c1 c2 c3 c4 b
+101 freckled freckled freckled freckled original
+102 ox ox ox ox original
+103 original
+104 I cooked I cooked I cooked I cooked original
+105 200 200 200 200 original
+PREHOOK: query: alter table table_change_string_group_string_group_string replace columns (insert_num int,
+    c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9),
+    c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING,
+    c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@table_change_string_group_string_group_string
+PREHOOK: Output: default@table_change_string_group_string_group_string
+POSTHOOK: query: alter table table_change_string_group_string_group_string replace columns (insert_num int,
+    c1 CHAR(50), c2 CHAR(9), c3 VARCHAR(50), c4 CHAR(9),
+    c5 VARCHAR(50), c6 VARCHAR(9), c7 STRING,
+    c8 CHAR(50), c9 CHAR(9), c10 STRING, b STRING)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@table_change_string_group_string_group_string
+POSTHOOK: Output: default@table_change_string_group_string_group_string
+PREHOOK: query: insert into table table_change_string_group_string_group_string VALUES (111,
+    'filler', 'filler', 'filler', 'filler',
+    'filler', 'filler', 'filler',
+    'filler', 'filler', 'filler',
+    'new')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@table_change_string_group_string_group_string
+POSTHOOK: query: insert into table table_change_string_group_string_group_string VALUES (111,
+    'filler', 'filler', 'filler', 'filler',
+    'filler', 'filler', 'filler',
+    'filler', 'filler', 'filler',
+    'new')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@table_change_string_group_string_group_string
+POSTHOOK: Lineage: table_change_string_group_string_group_string.b SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c1 SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c10 SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c2 SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c3 SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c4 SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c5 SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c6 SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c7 SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c8 SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_string_group_string.c9 SCRIPT []
+POSTHOOK: Lineage: table_change_string_group_string_group_string.insert_num SCRIPT []
+_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11
+PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_string_group_string_group_string
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_change_string_group_string_group_string
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,b from table_change_string_group_string_group_string
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_change_string_group_string_group_string
+#### A masked pattern was here ####
+insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b
+101 freckled freckled freckled freckled freckled freckled freckled freckled freckled freckled original
+102 ox ox ox ox ox ox ox ox ox ox original
+103 original
+104 I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked I cooked original
+105 200 200 200 200 200 200 200 200 200 200 original
+111 filler filler filler filler filler filler filler filler filler filler new
+PREHOOK: query: drop table table_change_string_group_string_group_string
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@table_change_string_group_string_group_string
+PREHOOK: Output: default@table_change_string_group_string_group_string
+POSTHOOK: query: drop table table_change_string_group_string_group_string
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@table_change_string_group_string_group_string
+POSTHOOK: Output: default@table_change_string_group_string_group_string
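Within the string group, REPLACE COLUMNS is again a metadata-only change: the post-alter select above returns every value intact because each one fits the new CHAR/VARCHAR lengths. A shorter target should clip on read, as in this sketch with a hypothetical table t_str:

  create table t_str (c1 string) stored as textfile;
  insert into t_str values ('freckled');
  alter table t_str replace columns (c1 char(4));
  select c1 from t_str;
  -- 'freckled' should read back as 'frec', mirroring the char(7)
  -- truncation seen in the numeric-to-string scenario earlier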
+PREHOOK: query: CREATE TABLE table_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int,
+    c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint,
+    c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint,
+    c12 int, c13 int, c14 int, c15 int,
+    c16 bigint, c17 bigint, c18 bigint,
+    b STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint
+POSTHOOK: query: CREATE TABLE table_change_lower_to_higher_numeric_group_tinyint_to_bigint(insert_num int,
+    c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint,
+    c7 smallint, c8 smallint, c9 smallint, c10 smallint, c11 smallint,
+    c12 int, c13 int, c14 int, c15 int,
+    c16 bigint, c17 bigint, c18 bigint,
+    b STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint
+PREHOOK: query: insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint SELECT insert_num,
+    tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1,
+    smallint1, smallint1, smallint1, smallint1, smallint1,
+    int1, int1, int1, int1,
+    bigint1, bigint1, bigint1,
+    'original' FROM schema_evolution_data
+PREHOOK: type: QUERY
+PREHOOK: Input: default@schema_evolution_data
+PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint
+POSTHOOK: query: insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint SELECT insert_num,
+    tinyint1, tinyint1, tinyint1, tinyint1, tinyint1, tinyint1,
+    smallint1, smallint1, smallint1, smallint1, smallint1,
+    int1, int1, int1, int1,
+    bigint1, bigint1, bigint1,
+    'original' FROM schema_evolution_data
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@schema_evolution_data
+POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.b SIMPLE []
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c10 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c11 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c12 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c13 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c14 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c15 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c16 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ]
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c17 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ]
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c18 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:bigint1, type:bigint, comment:null), ]
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c4 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c5 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c6 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c7 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c8 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c9 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:smallint1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ]
+insert_num tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 tinyint1 smallint1 smallint1 smallint1 smallint1 smallint1 int1 int1 int1 int1 bigint1 bigint1 bigint1 _c19
+PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint
+#### A masked pattern was here ####
+insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 b
+101 -128 -128 -128 -128 -128 -128 NULL NULL NULL NULL NULL -2147483648 -2147483648 -2147483648 -2147483648 NULL NULL NULL original
+102 127 127 127 127 127 127 32767 32767 32767 32767 32767 2147483647 2147483647 2147483647 2147483647 9223372036854775807 9223372036854775807 9223372036854775807 original
+103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
+104 23 23 23 23 23 23 834 834 834 834 834 203332 203332 203332 203332 888888857923222 888888857923222 888888857923222 original
+105 -99 -99 -99 -99 -99 -99 -28300 -28300 -28300 -28300 -28300 -999992 -999992 -999992 -999992 -222282153733 -222282153733 -222282153733 original
+PREHOOK: query: alter table table_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int,
+    c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE,
+    c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE,
+    c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE,
+    c16 decimal(38,18), c17 FLOAT, c18 DOUBLE,
+    b STRING)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint
+PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint
+POSTHOOK: query: alter table table_change_lower_to_higher_numeric_group_tinyint_to_bigint replace columns (insert_num int,
+    c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, c6 DOUBLE,
+    c7 INT, c8 BIGINT, c9 decimal(38,18), c10 FLOAT, c11 DOUBLE,
+    c12 BIGINT, c13 decimal(38,18), c14 FLOAT, c15 DOUBLE,
+    c16 decimal(38,18), c17 FLOAT, c18 DOUBLE,
+    b STRING)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint
+POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint
+PREHOOK: query: insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint VALUES (111,
+    7000, 80000, 90000000, 1234.5678, 9876.543, 789.321,
+    80000, 90000000, 1234.5678, 9876.543, 789.321,
+    90000000, 1234.5678, 9876.543, 789.321,
+    1234.5678, 9876.543, 789.321,
+    'new')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint
+POSTHOOK: query: insert into table table_change_lower_to_higher_numeric_group_tinyint_to_bigint VALUES (111,
+    7000, 80000, 90000000, 1234.5678, 9876.543, 789.321,
+    80000, 90000000, 1234.5678, 9876.543, 789.321,
+    90000000, 1234.5678, 9876.543, 789.321,
+    1234.5678, 9876.543, 789.321,
+    'new')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.b SCRIPT []
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c1 SCRIPT []
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c10 SCRIPT []
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c11 SCRIPT []
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c12 SCRIPT []
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c13 SCRIPT []
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c14 SCRIPT []
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c15 SCRIPT []
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c16 SCRIPT []
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c17 SCRIPT []
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c18 SCRIPT []
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c2 SCRIPT []
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c3 SCRIPT []
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c4 SCRIPT []
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c5 SCRIPT []
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c6 SCRIPT []
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c7 SCRIPT []
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c8 SCRIPT []
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.c9 SCRIPT []
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_tinyint_to_bigint.insert_num SCRIPT []
+_col0 _col1 _col2 _col3 _col4 _col5 _col6 _col7 _col8 _col9 _col10 _col11 _col12 _col13 _col14 _col15 _col16 _col17 _col18 _col19
+PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,b from table_change_lower_to_higher_numeric_group_tinyint_to_bigint
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint
+#### A masked pattern was here ####
+insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 b
+101 -128 -128 -128 -128.000000000000000000 -128.0 -128.0 NULL NULL NULL NULL NULL -2147483648 -2147483648.000000000000000000 -2.14748365E9 -2.147483648E9 NULL NULL NULL original
+102 127 127 127 127.000000000000000000 127.0 127.0 32767 32767 32767.000000000000000000 32767.0 32767.0 2147483647 2147483647.000000000000000000 2.14748365E9 2.147483647E9 9223372036854775807.000000000000000000 9.223372E18 9.223372036854776E18 original
+103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
+104 23 23 23 23.000000000000000000 23.0 23.0 834 834 834.000000000000000000 834.0 834.0 203332 203332.000000000000000000 203332.0 203332.0 888888857923222.000000000000000000 8.8888885E14 8.88888857923222E14 original
+105 -99 -99 -99 -99.000000000000000000 -99.0 -99.0 -28300 -28300 -28300.000000000000000000 -28300.0 -28300.0 -999992 -999992.000000000000000000 -999992.0 -999992.0 -222282153733.000000000000000000 -2.22282154E11 -2.22282153733E11 original
+111 7000 80000 90000000 1234.567800000000000000 9876.543 789.321 80000 90000000 1234.567800000000000000 9876.543 789.321 90000000 1234.567800000000000000 9876.543 789.321 1234.567800000000000000 9876.543 789.321 new
+PREHOOK: query: drop table table_change_lower_to_higher_numeric_group_tinyint_to_bigint
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint
+PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint
+POSTHOOK: query: drop table table_change_lower_to_higher_numeric_group_tinyint_to_bigint
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint
+POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_tinyint_to_bigint
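The widened reads above are value-preserving for the integral targets, while the FLOAT columns expose float's roughly seven significant digits: 9223372036854775807 surfaces as 9.223372E18 under FLOAT but 9.223372036854776E18 under DOUBLE. A condensed sketch of the same widening, with a hypothetical table t_widen:

  create table t_widen (c1 tinyint, c2 bigint) stored as textfile;
  insert into t_widen values (-128, 9223372036854775807);
  alter table t_widen replace columns (c1 bigint, c2 float);
  select c1, c2 from t_widen;
  -- c1 should stay exactly -128; c2 should round to 9.223372E18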
+PREHOOK: query: CREATE TABLE table_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int,
+    c1 decimal(38,18), c2 decimal(38,18),
+    c3 float,
+    b STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float
+POSTHOOK: query: CREATE TABLE table_change_lower_to_higher_numeric_group_decimal_to_float(insert_num int,
+    c1 decimal(38,18), c2 decimal(38,18),
+    c3 float,
+    b STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float
+PREHOOK: query: insert into table table_change_lower_to_higher_numeric_group_decimal_to_float SELECT insert_num,
+    decimal1, decimal1,
+    float1,
+    'original' FROM schema_evolution_data
+PREHOOK: type: QUERY
+PREHOOK: Input: default@schema_evolution_data
+PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float
+POSTHOOK: query: insert into table table_change_lower_to_higher_numeric_group_decimal_to_float SELECT insert_num,
+    decimal1, decimal1,
+    float1,
+    'original' FROM schema_evolution_data
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@schema_evolution_data
+POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.b SIMPLE []
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c1 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ]
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c2 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:decimal1, type:decimal(38,18), comment:null), ]
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c3 SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:float1, type:float, comment:null), ]
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.insert_num SIMPLE [(schema_evolution_data)schema_evolution_data.FieldSchema(name:insert_num, type:int, comment:null), ]
+insert_num decimal1 decimal1 float1 _c4
+PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float
+#### A masked pattern was here ####
+insert_num c1 c2 c3 b
+101 99999999999999999999.999999999999999999 99999999999999999999.999999999999999999 Infinity original
+102 -99999999999999999999.999999999999999999 -99999999999999999999.999999999999999999 -Infinity original
+103 NULL NULL NULL original
+104 66475.561431000000000000 66475.561431000000000000 -100.35978 original
+105 9250340.750000000000000000 9250340.750000000000000000 NULL original
+PREHOOK: query: alter table table_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float
+PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float
+POSTHOOK: query: alter table table_change_lower_to_higher_numeric_group_decimal_to_float replace columns (insert_num int, c1 float, c2 double, c3 DOUBLE, b STRING)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float
+POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float
+PREHOOK: query: insert into table table_change_lower_to_higher_numeric_group_decimal_to_float VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float
+POSTHOOK: query: insert into table table_change_lower_to_higher_numeric_group_decimal_to_float VALUES (111, 1234.5678, 9876.543, 1234.5678, 'new')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.b SCRIPT []
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c1 SCRIPT []
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c2 SCRIPT []
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.c3 SCRIPT []
+POSTHOOK: Lineage: table_change_lower_to_higher_numeric_group_decimal_to_float.insert_num SCRIPT []
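The verification select below re-reads the stored decimal text under the new FLOAT/DOUBLE types, so the same literal diverges by target width: row 104's 66475.561431000000000000 surfaces as 66475.56 (FLOAT) but 66475.561431 (DOUBLE), and the newly inserted 1234.5678 lands as 1234.5677 in the FLOAT column. A condensed sketch, with a hypothetical table t_dec:

  create table t_dec (c1 decimal(38,18), c2 decimal(38,18)) stored as textfile;
  insert into t_dec values (66475.561431, 66475.561431);
  alter table t_dec replace columns (c1 float, c2 double);
  select c1, c2 from t_dec;
  -- c1 should round to 66475.56; c2 should keep 66475.561431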
+_col0 _col1 _col2 _col3 _col4
+PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float
+#### A masked pattern was here ####
+POSTHOOK: query: select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_decimal_to_float
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float
+#### A masked pattern was here ####
+insert_num c1 c2 c3 b
+101 1.0E20 1.0E20 Infinity original
+102 -1.0E20 -1.0E20 -Infinity original
+103 NULL NULL NULL original
+104 66475.56 66475.561431 -100.35978 original
+105 9250341.0 9250340.75 NULL original
+111 1234.5677 9876.543 1234.5678 new
+PREHOOK: query: drop table table_change_lower_to_higher_numeric_group_decimal_to_float
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float
+PREHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float
+POSTHOOK: query: drop table table_change_lower_to_higher_numeric_group_decimal_to_float
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@table_change_lower_to_higher_numeric_group_decimal_to_float
+POSTHOOK: Output: default@table_change_lower_to_higher_numeric_group_decimal_to_float
diff --git ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part.q.out ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part.q.out index be33197..5095f0b 100644 --- ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part.q.out +++ ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part.q.out @@ -109,9 +109,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 4 @@ -268,9 +269,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 5 @@ -481,9 +483,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 5 @@ -633,9 +636,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 12 @@ -854,9 +858,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 22 @@ -1065,9 +1070,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 17 @@ -1262,9 +1268,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 12 @@ -1487,9 +1494,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 20 @@ -1654,9 +1662,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 5 diff --git ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_complex.q.out ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_complex.q.out index 98f14b3..d9dbe48 100644 --- ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_complex.q.out +++ ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_complex.q.out @@ -183,9 +183,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, 
org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE), (VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -487,9 +488,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE), (VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -717,9 +719,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE), (VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 3 diff --git ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_primitive.q.out ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_primitive.q.out index 2859213..0f25c7d 100644 --- ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_primitive.q.out +++ ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_primitive.q.out @@ -294,9 +294,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 55 @@ -545,9 +546,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 35 @@ -718,9 +720,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 14 @@ -875,9 +878,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: 
[org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 6 @@ -1113,9 +1117,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE), (VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 8 diff --git ql/src/test/results/clientpositive/llap/schema_evol_text_vec_table.q.out ql/src/test/results/clientpositive/llap/schema_evol_text_vec_table.q.out index 61fbaa0..06bb4b0 100644 --- ql/src/test/results/clientpositive/llap/schema_evol_text_vec_table.q.out +++ ql/src/test/results/clientpositive/llap/schema_evol_text_vec_table.q.out @@ -109,9 +109,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 4 @@ -273,9 +274,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 5 @@ -499,9 +501,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 5 @@ -796,9 +799,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 22 @@ -1002,9 +1006,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + 
vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 17 diff --git ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_part.q.out ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_part.q.out index fa054a2f..488e062 100644 --- ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_part.q.out +++ ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_part.q.out @@ -109,9 +109,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.row.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, org.apache.hadoop.mapred.TextInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 4 @@ -268,9 +269,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.row.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, org.apache.hadoop.mapred.TextInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 5 @@ -481,9 +483,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.row.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, org.apache.hadoop.mapred.TextInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 5 @@ -633,9 +636,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.row.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, org.apache.hadoop.mapred.TextInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 @@ -854,9 +858,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.row.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, org.apache.hadoop.mapred.TextInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 22 @@ -1065,9 +1070,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.row.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, 
org.apache.hadoop.mapred.TextInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 17 @@ -1262,9 +1268,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.row.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, org.apache.hadoop.mapred.TextInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 @@ -1487,9 +1494,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.row.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, org.apache.hadoop.mapred.TextInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 20 @@ -1654,9 +1662,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.row.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, org.apache.hadoop.mapred.TextInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 5 diff --git ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_part_all_complex.q.out ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_part_all_complex.q.out index 6e514db..45ae6a6 100644 --- ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_part_all_complex.q.out +++ ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_part_all_complex.q.out @@ -183,9 +183,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.row.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, org.apache.hadoop.mapred.TextInputFormat), (ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, org.apache.hadoop.mapred.TextInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -487,9 +488,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.row.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, org.apache.hadoop.mapred.TextInputFormat), (ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, org.apache.hadoop.mapred.TextInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -717,9 +719,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.row.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + 
inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, org.apache.hadoop.mapred.TextInputFormat), (ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, org.apache.hadoop.mapred.TextInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 diff --git ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_part_all_primitive.q.out ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_part_all_primitive.q.out index 28df9b3..c1e8961 100644 --- ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_part_all_primitive.q.out +++ ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_part_all_primitive.q.out @@ -294,9 +294,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.row.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, org.apache.hadoop.mapred.TextInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 55 @@ -545,9 +546,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.row.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, org.apache.hadoop.mapred.TextInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 35 @@ -718,9 +720,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.row.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, org.apache.hadoop.mapred.TextInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 14 @@ -875,9 +878,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.row.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, org.apache.hadoop.mapred.TextInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 6 @@ -1113,9 +1117,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.row.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, org.apache.hadoop.mapred.TextInputFormat), (ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, org.apache.hadoop.mapred.TextInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 8 diff --git 
ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_table.q.out ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_table.q.out index 05c0a50..c83ed3c 100644 --- ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_table.q.out +++ ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_table.q.out @@ -109,9 +109,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.row.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, org.apache.hadoop.mapred.TextInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 4 @@ -273,9 +274,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.row.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, org.apache.hadoop.mapred.TextInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 5 @@ -499,9 +501,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.row.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, org.apache.hadoop.mapred.TextInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 5 @@ -796,9 +799,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.row.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, org.apache.hadoop.mapred.TextInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 22 @@ -1002,9 +1006,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.row.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, org.apache.hadoop.mapred.TextInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 17 diff --git ql/src/test/results/clientpositive/llap/vector_adaptor_usage_mode.q.out ql/src/test/results/clientpositive/llap/vector_adaptor_usage_mode.q.out index 9ce5e1e..dac594c 100644 --- ql/src/test/results/clientpositive/llap/vector_adaptor_usage_mode.q.out +++ ql/src/test/results/clientpositive/llap/vector_adaptor_usage_mode.q.out @@ -147,7 +147,7 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: 
[org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] notVectorizedReason: SELECT operator: Could not vectorize expression (mode = PROJECTION): GenericUDFRegExp(Column[c2], Const string val) because hive.vectorized.adaptor.usage.mode=none vectorized: false @@ -225,7 +225,7 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] notVectorizedReason: SELECT operator: Could not vectorize expression (mode = PROJECTION): GenericUDFBridge ==> regexp_extract (Column[c2], Const string val_([0-9]+), Const int 1) because hive.vectorized.adaptor.usage.mode=none vectorized: false @@ -303,7 +303,7 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] notVectorizedReason: SELECT operator: Could not vectorize expression (mode = PROJECTION): GenericUDFBridge ==> regexp_replace (Column[c2], Const string val, Const string replaced) because hive.vectorized.adaptor.usage.mode=none vectorized: false @@ -381,7 +381,7 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] notVectorizedReason: SELECT operator: Could not vectorize expression (mode = PROJECTION): GenericUDFRegExp(Column[c2], Const string val) because hive.vectorized.adaptor.usage.mode=chosen and the UDF wasn't one of the chosen ones vectorized: false @@ -474,7 +474,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: true vectorized: true @@ -568,7 +568,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: true vectorized: true @@ -634,7 +634,7 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] notVectorizedReason: SELECT operator: Could not vectorize expression (mode = PROJECTION): GenericUDFPower(Column[key], Const int 2) because hive.vectorized.adaptor.usage.mode=none vectorized: false @@ -741,7 +741,7 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] notVectorizedReason: SELECT operator: Could not vectorize expression (mode = PROJECTION): GenericUDFBridge ==> log (Column[value], Const decimal(20,10) 10) because hive.vectorized.adaptor.usage.mode=none vectorized: false @@ -806,7 +806,7 @@ STAGE PLANS: 
Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] notVectorizedReason: SELECT operator: Could not vectorize expression (mode = PROJECTION): GenericUDFPower(Column[key], Const int 2) because hive.vectorized.adaptor.usage.mode=chosen and the UDF wasn't one of the chosen ones vectorized: false @@ -913,7 +913,7 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] notVectorizedReason: SELECT operator: Could not vectorize expression (mode = PROJECTION): GenericUDFBridge ==> log (Column[value], Const decimal(20,10) 10) because hive.vectorized.adaptor.usage.mode=chosen and the UDF wasn't one of the chosen ones vectorized: false @@ -1009,7 +1009,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -1135,7 +1135,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_aggregate_9.q.out ql/src/test/results/clientpositive/llap/vector_aggregate_9.q.out index 476670b..b13d03c 100644 --- ql/src/test/results/clientpositive/llap/vector_aggregate_9.q.out +++ ql/src/test/results/clientpositive/llap/vector_aggregate_9.q.out @@ -168,9 +168,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 13 @@ -299,9 +300,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 13 @@ -430,9 +432,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true 
rowBatchContext: dataColumnCount: 13 diff --git ql/src/test/results/clientpositive/llap/vector_aggregate_without_gby.q.out ql/src/test/results/clientpositive/llap/vector_aggregate_without_gby.q.out index 5f67488..6d13a98 100644 --- ql/src/test/results/clientpositive/llap/vector_aggregate_without_gby.q.out +++ ql/src/test/results/clientpositive/llap/vector_aggregate_without_gby.q.out @@ -107,9 +107,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 diff --git ql/src/test/results/clientpositive/llap/vector_auto_smb_mapjoin_14.q.out ql/src/test/results/clientpositive/llap/vector_auto_smb_mapjoin_14.q.out index 54216fa..2da1f00 100644 --- ql/src/test/results/clientpositive/llap/vector_auto_smb_mapjoin_14.q.out +++ ql/src/test/results/clientpositive/llap/vector_auto_smb_mapjoin_14.q.out @@ -1269,7 +1269,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -1312,7 +1312,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_between_columns.q.out ql/src/test/results/clientpositive/llap/vector_between_columns.q.out index 48d5275..062319c 100644 --- ql/src/test/results/clientpositive/llap/vector_between_columns.q.out +++ ql/src/test/results/clientpositive/llap/vector_between_columns.q.out @@ -114,7 +114,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -148,7 +148,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -272,7 +272,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -306,7 +306,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_between_in.q.out ql/src/test/results/clientpositive/llap/vector_between_in.q.out index e02f64c..a07b8f5 100644 --- ql/src/test/results/clientpositive/llap/vector_between_in.q.out +++ ql/src/test/results/clientpositive/llap/vector_between_in.q.out @@ -69,7 +69,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -174,7 +174,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -273,7 +273,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -378,7 +378,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -477,7 +477,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -572,7 +572,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -667,7 +667,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -772,7 +772,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -1127,7 +1127,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -1265,7 +1265,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -1403,7 +1403,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: true vectorized: true @@ -1541,7 +1541,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: true vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out index aabfc73..5a9f924 100644 --- ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out +++ ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out @@ -195,7 +195,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: true vectorized: true @@ -238,7 +238,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -393,7 +393,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -596,7 +596,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -639,7 +639,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_bround.q.out ql/src/test/results/clientpositive/llap/vector_bround.q.out index 
7b14a89..932525f 100644 --- ql/src/test/results/clientpositive/llap/vector_bround.q.out +++ ql/src/test/results/clientpositive/llap/vector_bround.q.out @@ -87,9 +87,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 diff --git ql/src/test/results/clientpositive/llap/vector_cast_constant.q.out ql/src/test/results/clientpositive/llap/vector_cast_constant.q.out index 22b7722..de30200 100644 --- ql/src/test/results/clientpositive/llap/vector_cast_constant.q.out +++ ql/src/test/results/clientpositive/llap/vector_cast_constant.q.out @@ -173,7 +173,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_char_2.q.out ql/src/test/results/clientpositive/llap/vector_char_2.q.out index 827ec2e..c32d5d1 100644 --- ql/src/test/results/clientpositive/llap/vector_char_2.q.out +++ ql/src/test/results/clientpositive/llap/vector_char_2.q.out @@ -124,7 +124,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -320,7 +320,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_char_4.q.out ql/src/test/results/clientpositive/llap/vector_char_4.q.out index ba704350..d6c0b18 100644 --- ql/src/test/results/clientpositive/llap/vector_char_4.q.out +++ ql/src/test/results/clientpositive/llap/vector_char_4.q.out @@ -176,7 +176,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_char_mapjoin1.q.out ql/src/test/results/clientpositive/llap/vector_char_mapjoin1.q.out index 72cd1d3..51823f1 100644 --- ql/src/test/results/clientpositive/llap/vector_char_mapjoin1.q.out +++ ql/src/test/results/clientpositive/llap/vector_char_mapjoin1.q.out @@ -197,7 +197,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + 
inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -240,7 +240,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -351,7 +351,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -407,7 +407,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -521,7 +521,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -577,7 +577,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_char_simple.q.out ql/src/test/results/clientpositive/llap/vector_char_simple.q.out index 696359b..fc71cf9 100644 --- ql/src/test/results/clientpositive/llap/vector_char_simple.q.out +++ ql/src/test/results/clientpositive/llap/vector_char_simple.q.out @@ -77,7 +77,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -163,7 +163,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -261,7 +261,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_coalesce.q.out ql/src/test/results/clientpositive/llap/vector_coalesce.q.out index 339df62..3aa19f8 100644 --- ql/src/test/results/clientpositive/llap/vector_coalesce.q.out +++ 
ql/src/test/results/clientpositive/llap/vector_coalesce.q.out @@ -48,7 +48,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -156,7 +156,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -264,7 +264,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -348,7 +348,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -451,7 +451,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -533,7 +533,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_coalesce_2.q.out ql/src/test/results/clientpositive/llap/vector_coalesce_2.q.out index e8151b7..542fa3f 100644 --- ql/src/test/results/clientpositive/llap/vector_coalesce_2.q.out +++ ql/src/test/results/clientpositive/llap/vector_coalesce_2.q.out @@ -245,7 +245,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -368,7 +368,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_complex_all.q.out ql/src/test/results/clientpositive/llap/vector_complex_all.q.out index afdfa62..b8f2a95 100644 --- ql/src/test/results/clientpositive/llap/vector_complex_all.q.out +++ 
ql/src/test/results/clientpositive/llap/vector_complex_all.q.out @@ -119,9 +119,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 5 @@ -201,9 +202,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 5 @@ -283,9 +285,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 5 @@ -365,9 +368,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 5 @@ -447,9 +451,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 5 @@ -529,9 +534,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 5 @@ -600,8 +606,9 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] notVectorizedReason: SELECT operator: Could not vectorize expression (mode = PROJECTION): Column[strct].b + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: false Stage: Stage-0 @@ -682,9 +689,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 4 @@ -721,9 +729,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 4 @@ -760,9 +769,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 4 @@ -802,9 +812,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -941,9 +952,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 5 @@ -1053,8 +1065,9 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] notVectorizedReason: Key expression for GROUPBY operator: Vectorizing complex type LIST not supported + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: false Reducer 2 Execution mode: llap @@ -1167,9 +1180,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 5 @@ -1286,8 +1300,9 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] notVectorizedReason: SELECT operator: Could not vectorize expression (mode = PROJECTION): Column[strct].b + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: false Reducer 2 Execution mode: vectorized, llap @@ -1397,8 +1412,9 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] notVectorizedReason: Key expression for GROUPBY operator: Vectorizing complex type MAP not supported + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: false Reducer 2 Execution mode: llap diff --git ql/src/test/results/clientpositive/llap/vector_complex_join.q.out ql/src/test/results/clientpositive/llap/vector_complex_join.q.out index 3a0c6a4..11a35ab 100644 --- ql/src/test/results/clientpositive/llap/vector_complex_join.q.out +++ ql/src/test/results/clientpositive/llap/vector_complex_join.q.out @@ -93,7 +93,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -136,7 +136,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -272,7 +272,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -308,7 +308,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -407,7 +407,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -443,7 +443,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: 
[org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_count.q.out ql/src/test/results/clientpositive/llap/vector_count.q.out index 400d930..cfb191b 100644 --- ql/src/test/results/clientpositive/llap/vector_count.q.out +++ ql/src/test/results/clientpositive/llap/vector_count.q.out @@ -110,7 +110,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -199,7 +199,7 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] notVectorizedReason: GROUPBY operator: Aggregations with > 1 parameter are not supported count([Column[a], Column[b]]) vectorized: false Reducer 2 @@ -295,7 +295,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -390,7 +390,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_count_distinct.q.out ql/src/test/results/clientpositive/llap/vector_count_distinct.q.out index 90086ea..c7ab8a4 100644 --- ql/src/test/results/clientpositive/llap/vector_count_distinct.q.out +++ ql/src/test/results/clientpositive/llap/vector_count_distinct.q.out @@ -1291,7 +1291,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_data_types.q.out ql/src/test/results/clientpositive/llap/vector_data_types.q.out index 06b50bb..da630a6 100644 --- ql/src/test/results/clientpositive/llap/vector_data_types.q.out +++ ql/src/test/results/clientpositive/llap/vector_data_types.q.out @@ -247,7 +247,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_date_1.q.out ql/src/test/results/clientpositive/llap/vector_date_1.q.out index 1e3d2b3..2375701 100644 --- ql/src/test/results/clientpositive/llap/vector_date_1.q.out +++ ql/src/test/results/clientpositive/llap/vector_date_1.q.out @@ -752,7 
+752,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_decimal_1.q.out ql/src/test/results/clientpositive/llap/vector_decimal_1.q.out index 4f1b509..eac7465 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_1.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_1.q.out @@ -88,9 +88,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -204,9 +205,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -320,9 +322,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -436,9 +439,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -552,9 +556,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -668,9 +673,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: 
[(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -784,9 +790,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -900,9 +907,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -1016,9 +1024,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 diff --git ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out index 51c0854..5326079 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out @@ -88,9 +88,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -205,9 +206,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 1 diff --git ql/src/test/results/clientpositive/llap/vector_decimal_2.q.out ql/src/test/results/clientpositive/llap/vector_decimal_2.q.out index ca13c06..bec044b 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_2.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_2.q.out @@ -77,9 +77,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: 
[org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -193,9 +194,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -309,9 +311,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -425,9 +428,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -541,9 +545,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -657,9 +662,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -773,9 +779,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -889,9 +896,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: 
[org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -1016,9 +1024,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -1132,9 +1141,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -1248,9 +1258,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -1364,9 +1375,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -1480,9 +1492,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -1596,9 +1609,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -1712,9 +1726,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: 
[org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -1828,9 +1843,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -1941,9 +1957,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -2020,9 +2037,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -2099,9 +2117,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -2178,9 +2197,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -2248,9 +2268,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -2327,9 +2348,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: 
[org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -2406,9 +2428,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -2485,9 +2508,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -2564,9 +2588,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -2643,9 +2668,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -2722,9 +2748,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 diff --git ql/src/test/results/clientpositive/llap/vector_decimal_6.q.out ql/src/test/results/clientpositive/llap/vector_decimal_6.q.out index 9a6dd54..da0e384 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_6.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_6.q.out @@ -161,9 +161,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true 
rowBatchContext: dataColumnCount: 2 @@ -302,9 +303,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -454,9 +456,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -498,9 +501,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -683,9 +687,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 diff --git ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out index 32e2088..dc5eb00 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out @@ -101,9 +101,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 4 @@ -280,9 +281,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 4 @@ -482,9 +484,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] 
featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 4 @@ -662,9 +665,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 4 diff --git ql/src/test/results/clientpositive/llap/vector_decimal_cast.q.out ql/src/test/results/clientpositive/llap/vector_decimal_cast.q.out index 4d86734..25ee08f 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_cast.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_cast.q.out @@ -62,9 +62,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 @@ -186,9 +187,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 12 diff --git ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out index d63eeb7..f4170af 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out @@ -79,9 +79,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -239,9 +240,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 diff --git 
ql/src/test/results/clientpositive/llap/vector_decimal_mapjoin.q.out ql/src/test/results/clientpositive/llap/vector_decimal_mapjoin.q.out index aca8dc0..60d3849 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_mapjoin.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_mapjoin.q.out @@ -154,9 +154,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -205,9 +206,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -418,9 +420,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -470,9 +473,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -760,9 +764,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -812,9 +817,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1026,9 +1032,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: 
org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1079,9 +1086,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1293,9 +1301,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [[] is disabled because it is not in hive.vectorized.input.format.supports.enabled []] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1345,9 +1354,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [[] is disabled because it is not in hive.vectorized.input.format.supports.enabled []] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1559,9 +1569,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [[] is disabled because it is not in hive.vectorized.input.format.supports.enabled []] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1612,9 +1623,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [[] is disabled because it is not in hive.vectorized.input.format.supports.enabled []] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 diff --git ql/src/test/results/clientpositive/llap/vector_decimal_math_funcs.q.out ql/src/test/results/clientpositive/llap/vector_decimal_math_funcs.q.out index 270b634..fac1b70 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_math_funcs.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_math_funcs.q.out @@ -140,9 +140,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 4 @@ -386,9 +387,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 4 diff --git ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out index 2fc5277..11137a1 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out @@ -614,9 +614,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -1225,9 +1226,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 1 diff --git ql/src/test/results/clientpositive/llap/vector_decimal_round.q.out ql/src/test/results/clientpositive/llap/vector_decimal_round.q.out index b721914..7338fd0 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_round.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_round.q.out @@ -88,9 +88,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -206,9 +207,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -351,9 +353,10 @@ STAGE PLANS: enabledConditionsMet: 
hive.vectorized.use.row.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.RCFileInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.RCFileInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe, org.apache.hadoop.hive.ql.io.RCFileInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -468,9 +471,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.row.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.RCFileInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.RCFileInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe, org.apache.hadoop.hive.ql.io.RCFileInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -613,9 +617,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -730,9 +735,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 diff --git ql/src/test/results/clientpositive/llap/vector_decimal_round_2.q.out ql/src/test/results/clientpositive/llap/vector_decimal_round_2.q.out index 3c376c6..30d06dd 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_round_2.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_round_2.q.out @@ -91,9 +91,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -259,9 +260,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -454,9 +456,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - 
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -638,9 +641,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 diff --git ql/src/test/results/clientpositive/llap/vector_decimal_trailing.q.out ql/src/test/results/clientpositive/llap/vector_decimal_trailing.q.out index dedad24..aaecf2e 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_trailing.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_trailing.q.out @@ -121,9 +121,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 diff --git ql/src/test/results/clientpositive/llap/vector_decimal_udf.q.out ql/src/test/results/clientpositive/llap/vector_decimal_udf.q.out index 56248d1..0361a1f 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_udf.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_udf.q.out @@ -101,9 +101,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -217,9 +218,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -333,9 +335,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -449,9 +452,10 @@ STAGE PLANS: enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -565,9 +569,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -681,9 +686,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -797,9 +803,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -913,9 +920,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1029,9 +1037,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1151,9 +1160,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1252,9 +1262,10 @@ STAGE PLANS: enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1368,9 +1379,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1484,9 +1496,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1606,9 +1619,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1692,9 +1706,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1811,9 +1826,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1920,9 +1936,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -2022,9 +2039,10 @@ STAGE PLANS: enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -2138,9 +2156,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -2273,9 +2292,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -2452,9 +2472,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -2723,9 +2744,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -2839,9 +2861,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -2955,9 +2978,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: true + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -3071,9 +3095,10 @@ STAGE PLANS: enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -3205,9 +3230,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -3358,9 +3384,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -3485,8 +3512,9 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] notVectorizedReason: Aggregation Function expression for GROUPBY operator: UDF histogram_numeric not supported + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: false Reducer 2 Execution mode: llap @@ -3591,9 +3619,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -3722,9 +3751,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -3853,9 +3883,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -3993,9 +4024,10 @@ STAGE 
PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -4110,9 +4142,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -4227,9 +4260,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -4344,9 +4378,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -4461,9 +4496,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -4578,9 +4614,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -4695,9 +4732,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -4812,9 +4850,10 @@ STAGE PLANS: 
inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -4929,9 +4968,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -5052,9 +5092,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -5154,9 +5195,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -5271,9 +5313,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -5388,9 +5431,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -5511,9 +5555,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -5598,9 +5643,10 @@ STAGE PLANS: 
inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -5718,9 +5764,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -5828,9 +5875,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -5931,9 +5979,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -6048,9 +6097,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -6184,9 +6234,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -6364,9 +6415,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -6636,9 +6688,10 @@ STAGE PLANS: 
inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -6753,9 +6806,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -6870,9 +6924,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: true + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -6987,9 +7042,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -7122,9 +7178,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -7276,9 +7333,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -7403,8 +7461,9 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] notVectorizedReason: Aggregation Function expression for GROUPBY operator: UDF histogram_numeric not supported + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: false Reducer 2 Execution mode: llap @@ -7510,9 +7569,10 @@ 
STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -7642,9 +7702,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -7774,9 +7835,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 diff --git ql/src/test/results/clientpositive/llap/vector_decimal_udf2.q.out ql/src/test/results/clientpositive/llap/vector_decimal_udf2.q.out index 90f68f5..b8c4abb 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_udf2.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_udf2.q.out @@ -110,9 +110,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -206,9 +207,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: true + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -303,9 +305,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -400,9 +403,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: 
org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: true + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 diff --git ql/src/test/results/clientpositive/llap/vector_distinct_2.q.out ql/src/test/results/clientpositive/llap/vector_distinct_2.q.out index 73d04a9..5735b42 100644 --- ql/src/test/results/clientpositive/llap/vector_distinct_2.q.out +++ ql/src/test/results/clientpositive/llap/vector_distinct_2.q.out @@ -166,7 +166,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_elt.q.out ql/src/test/results/clientpositive/llap/vector_elt.q.out index 24a1a65..6be2539 100644 --- ql/src/test/results/clientpositive/llap/vector_elt.q.out +++ ql/src/test/results/clientpositive/llap/vector_elt.q.out @@ -65,7 +65,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -176,7 +176,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_groupby4.q.out ql/src/test/results/clientpositive/llap/vector_groupby4.q.out index d0bba2e..1b8b45c 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby4.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby4.q.out @@ -79,7 +79,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_groupby6.q.out ql/src/test/results/clientpositive/llap/vector_groupby6.q.out index 4c68fd0..be02f0d 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby6.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby6.q.out @@ -79,7 +79,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_groupby_3.q.out ql/src/test/results/clientpositive/llap/vector_groupby_3.q.out index 127d8ad..deb12d6 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_3.q.out +++ 
ql/src/test/results/clientpositive/llap/vector_groupby_3.q.out @@ -169,7 +169,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id1.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id1.q.out index 7224d59..29e4517 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id1.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id1.q.out @@ -96,9 +96,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -255,9 +256,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -414,9 +416,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -567,9 +570,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -720,9 +724,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -880,9 +885,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: 
[org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id2.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id2.q.out index e6075c7..ee25e46 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id2.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id2.q.out @@ -101,9 +101,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -303,9 +304,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -514,9 +516,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -821,9 +824,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1124,9 +1128,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1452,9 +1457,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1773,9 +1779,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1938,9 +1945,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -2167,9 +2175,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id3.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id3.q.out index da4b81f..9aadcad 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id3.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id3.q.out @@ -115,9 +115,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -283,9 +284,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets1.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets1.q.out index d2b738b..d0d62a2 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets1.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets1.q.out @@ -115,9 +115,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -277,9 +278,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -439,9 +441,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -601,9 +604,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -756,9 +760,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -912,9 +917,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -1054,9 +1060,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets2.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets2.q.out index 1877bba..c8d1825 100644 --- 
ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets2.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets2.q.out @@ -101,9 +101,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -282,9 +283,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -464,8 +466,9 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] notVectorizedReason: GROUPBY operator: Vector aggregation : "sum" for input type: "BYTES" and output type: "DOUBLE" and mode: PARTIAL1 not supported for evaluator GenericUDAFSumDouble + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: false Reducer 2 Execution mode: vectorized, llap @@ -686,9 +689,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 4 diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3.q.out index 7c9f668..701ef44 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3.q.out @@ -85,8 +85,9 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] notVectorizedReason: GROUPBY operator: Vector aggregation : "avg" for input type: "BYTES" and output type: "STRUCT" and mode: PARTIAL1 not supported for evaluator GenericUDAFAverageEvaluatorDouble + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: false Reducer 2 Execution mode: vectorized, llap @@ -193,8 +194,9 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: 
[org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] notVectorizedReason: GROUPBY operator: Vector aggregation : "avg" for input type: "BYTES" and output type: "STRUCT" and mode: PARTIAL1 not supported for evaluator GenericUDAFAverageEvaluatorDouble + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: false Reducer 2 Execution mode: vectorized, llap @@ -327,8 +329,9 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] notVectorizedReason: GROUPBY operator: Vector aggregation : "avg" for input type: "BYTES" and output type: "STRUCT" and mode: PARTIAL1 not supported for evaluator GenericUDAFAverageEvaluatorDouble + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: false Reducer 2 Execution mode: vectorized, llap diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets4.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets4.q.out index 6a5e679..93c33e4 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets4.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets4.q.out @@ -121,9 +121,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -365,9 +366,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -628,9 +630,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets5.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets5.q.out index 4d8fa16..1b398e7 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets5.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets5.q.out @@ -100,9 +100,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: 
[org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -292,9 +293,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -511,9 +513,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets6.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets6.q.out index 5e9e204..ff0d241 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets6.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets6.q.out @@ -100,9 +100,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -243,9 +244,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_grouping.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_grouping.q.out index b81a0d3..a1aa0a4 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_grouping.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_grouping.q.out @@ -100,9 +100,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -261,9 +262,10 @@ STAGE PLANS: 
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -429,9 +431,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -598,9 +601,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -804,9 +808,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -965,9 +970,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1140,9 +1146,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: true + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1302,9 +1309,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: true + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1502,9 +1510,10 @@ STAGE PLANS: enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1668,9 +1677,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1834,9 +1844,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1995,9 +2006,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_limit.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_limit.q.out index e8ca06e..348f77b 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_limit.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_limit.q.out @@ -102,9 +102,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -303,9 +304,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -504,9 +506,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -702,9 +705,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -897,9 +901,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -1082,9 +1087,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_window.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_window.q.out index 4de6ebb..c3fab04 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_window.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_window.q.out @@ -99,9 +99,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 3 diff --git ql/src/test/results/clientpositive/llap/vector_groupby_mapjoin.q.out ql/src/test/results/clientpositive/llap/vector_groupby_mapjoin.q.out index 82b6ba0..03880e9 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_mapjoin.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_mapjoin.q.out @@ -63,7 +63,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -131,7 +131,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] 
featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_groupby_reduce.q.out ql/src/test/results/clientpositive/llap/vector_groupby_reduce.q.out index 0da9a45..637ed00 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_reduce.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_reduce.q.out @@ -295,7 +295,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -493,7 +493,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -786,7 +786,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -1010,7 +1010,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_groupby_rollup1.q.out ql/src/test/results/clientpositive/llap/vector_groupby_rollup1.q.out index d1263cd..00c8e43 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_rollup1.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_rollup1.q.out @@ -97,9 +97,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -252,9 +253,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -382,9 +384,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + 
inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -581,9 +584,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -792,9 +796,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 diff --git ql/src/test/results/clientpositive/llap/vector_grouping_sets.q.out ql/src/test/results/clientpositive/llap/vector_grouping_sets.q.out index 0fc4b06..5d31689 100644 --- ql/src/test/results/clientpositive/llap/vector_grouping_sets.q.out +++ ql/src/test/results/clientpositive/llap/vector_grouping_sets.q.out @@ -192,7 +192,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -323,7 +323,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_if_expr.q.out ql/src/test/results/clientpositive/llap/vector_if_expr.q.out index fb5cdb5..337fb69 100644 --- ql/src/test/results/clientpositive/llap/vector_if_expr.q.out +++ ql/src/test/results/clientpositive/llap/vector_if_expr.q.out @@ -59,7 +59,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_include_no_sel.q.out ql/src/test/results/clientpositive/llap/vector_include_no_sel.q.out index 4a3bc02..a867156 100644 --- ql/src/test/results/clientpositive/llap/vector_include_no_sel.q.out +++ ql/src/test/results/clientpositive/llap/vector_include_no_sel.q.out @@ -213,7 +213,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: 
[org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -240,7 +240,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_inner_join.q.out ql/src/test/results/clientpositive/llap/vector_inner_join.q.out index 0e1136a..8eb8645 100644 --- ql/src/test/results/clientpositive/llap/vector_inner_join.q.out +++ ql/src/test/results/clientpositive/llap/vector_inner_join.q.out @@ -119,9 +119,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -170,9 +171,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -278,9 +280,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -341,9 +344,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -493,9 +497,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -545,9 +550,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: 
[org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -637,9 +643,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -706,9 +713,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -824,9 +832,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -876,9 +885,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -993,9 +1003,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1045,9 +1056,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1162,9 +1174,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: 
[org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1214,9 +1227,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1306,9 +1320,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1383,9 +1398,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1475,9 +1491,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1552,9 +1569,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 diff --git ql/src/test/results/clientpositive/llap/vector_interval_1.q.out ql/src/test/results/clientpositive/llap/vector_interval_1.q.out index 1be7232..91f42f2 100644 --- ql/src/test/results/clientpositive/llap/vector_interval_1.q.out +++ ql/src/test/results/clientpositive/llap/vector_interval_1.q.out @@ -100,7 +100,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -227,7 +227,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true 
inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -362,7 +362,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -509,7 +509,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -667,7 +667,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -807,7 +807,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -929,7 +929,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -1057,7 +1057,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_interval_2.q.out ql/src/test/results/clientpositive/llap/vector_interval_2.q.out index 7548686..a4e7359 100644 --- ql/src/test/results/clientpositive/llap/vector_interval_2.q.out +++ ql/src/test/results/clientpositive/llap/vector_interval_2.q.out @@ -154,7 +154,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -360,7 +360,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -566,7 +566,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true 
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -772,7 +772,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -969,7 +969,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1163,7 +1163,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1347,7 +1347,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1531,7 +1531,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1725,7 +1725,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1919,7 +1919,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
diff --git ql/src/test/results/clientpositive/llap/vector_interval_arithmetic.q.out ql/src/test/results/clientpositive/llap/vector_interval_arithmetic.q.out
index f2a4d3a..bd8fb13 100644
--- ql/src/test/results/clientpositive/llap/vector_interval_arithmetic.q.out
+++ ql/src/test/results/clientpositive/llap/vector_interval_arithmetic.q.out
@@ -108,7 +108,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -290,7 +290,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -472,7 +472,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -656,7 +656,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -761,7 +761,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -945,7 +945,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1129,7 +1129,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1311,7 +1311,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
diff --git ql/src/test/results/clientpositive/llap/vector_interval_mapjoin.q.out ql/src/test/results/clientpositive/llap/vector_interval_mapjoin.q.out
index cfe3d5f..173bb67 100644
--- ql/src/test/results/clientpositive/llap/vector_interval_mapjoin.q.out
+++ ql/src/test/results/clientpositive/llap/vector_interval_mapjoin.q.out
@@ -258,7 +258,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -301,7 +301,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
diff --git ql/src/test/results/clientpositive/llap/vector_join30.q.out ql/src/test/results/clientpositive/llap/vector_join30.q.out
index 7c34aff..92dc052 100644
--- ql/src/test/results/clientpositive/llap/vector_join30.q.out
+++ ql/src/test/results/clientpositive/llap/vector_join30.q.out
@@ -81,7 +81,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -148,7 +148,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: true
                 vectorized: true
@@ -298,7 +298,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: true
                 vectorized: true
@@ -334,7 +334,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -459,7 +459,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -519,7 +519,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: true
                 vectorized: true
@@ -657,7 +657,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -730,7 +730,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: true
                 vectorized: true
@@ -772,7 +772,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -909,7 +909,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -945,7 +945,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -980,7 +980,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1139,7 +1139,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1175,7 +1175,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1210,7 +1210,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1369,7 +1369,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1405,7 +1405,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1440,7 +1440,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1599,7 +1599,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1635,7 +1635,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1670,7 +1670,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
diff --git ql/src/test/results/clientpositive/llap/vector_left_outer_join.q.out ql/src/test/results/clientpositive/llap/vector_left_outer_join.q.out
index dc8f47e..92fbbc6 100644
--- ql/src/test/results/clientpositive/llap/vector_left_outer_join.q.out
+++ ql/src/test/results/clientpositive/llap/vector_left_outer_join.q.out
@@ -77,7 +77,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -102,7 +102,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -127,7 +127,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
diff --git ql/src/test/results/clientpositive/llap/vector_left_outer_join2.q.out ql/src/test/results/clientpositive/llap/vector_left_outer_join2.q.out
index a5a3670..580e0ad 100644
--- ql/src/test/results/clientpositive/llap/vector_left_outer_join2.q.out
+++ ql/src/test/results/clientpositive/llap/vector_left_outer_join2.q.out
@@ -346,7 +346,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -382,7 +382,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -488,7 +488,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -524,7 +524,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -627,7 +627,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -663,7 +663,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -766,7 +766,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -802,7 +802,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
diff --git ql/src/test/results/clientpositive/llap/vector_leftsemi_mapjoin.q.out ql/src/test/results/clientpositive/llap/vector_leftsemi_mapjoin.q.out
index 3f00869..f7414ec 100644
--- ql/src/test/results/clientpositive/llap/vector_leftsemi_mapjoin.q.out
+++ ql/src/test/results/clientpositive/llap/vector_leftsemi_mapjoin.q.out
@@ -3374,7 +3374,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -3404,7 +3404,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -3487,7 +3487,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -3517,7 +3517,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -3602,7 +3602,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -3632,7 +3632,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -3712,7 +3712,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -3742,7 +3742,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -3830,7 +3830,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -3860,7 +3860,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -3943,7 +3943,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -3973,7 +3973,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -4056,7 +4056,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -4086,7 +4086,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -4166,7 +4166,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -4193,7 +4193,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -4275,7 +4275,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -4305,7 +4305,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -4401,7 +4401,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -4431,7 +4431,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -4515,7 +4515,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -4537,7 +4537,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -4567,7 +4567,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -4660,7 +4660,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -4690,7 +4690,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -4781,7 +4781,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -4811,7 +4811,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -4841,7 +4841,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -4931,7 +4931,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -4950,7 +4950,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -4977,7 +4977,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -5074,7 +5074,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -5093,7 +5093,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -5120,7 +5120,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -5221,7 +5221,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -5248,7 +5248,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -5267,7 +5267,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -5368,7 +5368,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -5395,7 +5395,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -5414,7 +5414,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -5517,7 +5517,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -5544,7 +5544,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -5563,7 +5563,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -5690,7 +5690,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -5720,7 +5720,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -5739,7 +5739,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -5854,7 +5854,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -5884,7 +5884,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -5974,9 +5974,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -6037,9 +6038,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -6176,9 +6178,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -6239,9 +6242,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -6380,9 +6384,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -6443,9 +6448,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -6584,9 +6590,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -6647,9 +6654,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -6791,9 +6799,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -6854,9 +6863,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -6994,9 +7004,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -7061,9 +7072,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -7201,9 +7213,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -7268,9 +7281,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -7405,9 +7419,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -7464,9 +7479,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -7602,9 +7618,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -7665,9 +7682,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -7817,9 +7835,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -7881,9 +7900,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -8030,9 +8050,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -8074,9 +8095,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -8137,9 +8159,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -8286,9 +8309,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -8349,9 +8373,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -8499,9 +8524,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -8562,9 +8588,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -8625,9 +8652,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -8770,9 +8798,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -8806,9 +8835,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -8862,9 +8892,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -9000,9 +9031,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -9036,9 +9068,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -9092,9 +9125,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -9250,9 +9284,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -9306,9 +9341,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -9342,9 +9378,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -9500,9 +9537,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -9556,9 +9594,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -9592,9 +9631,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -9752,9 +9792,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -9808,9 +9849,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -9844,9 +9886,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -10055,9 +10098,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -10118,9 +10162,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -10154,9 +10199,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -10331,9 +10377,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -10394,9 +10441,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -10494,9 +10542,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -10557,9 +10606,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -10697,9 +10747,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -10760,9 +10811,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -10902,9 +10954,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -10965,9 +11018,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -11107,9 +11161,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -11170,9 +11225,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -11315,9 +11371,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -11378,9 +11435,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -11518,9 +11576,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -11586,9 +11645,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -11726,9 +11786,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -11794,9 +11855,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -11931,9 +11993,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -11991,9 +12054,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -12130,9 +12194,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -12193,9 +12258,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -12346,9 +12412,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -12410,9 +12477,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -12559,9 +12627,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -12603,9 +12672,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -12666,9 +12736,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -12816,9 +12887,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -12879,9 +12951,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -13029,9 +13102,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -13092,9 +13166,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -13155,9 +13230,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -13300,9 +13376,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -13336,9 +13413,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -13392,9 +13470,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -13530,9 +13609,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -13566,9 +13646,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true 
inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -13622,9 +13703,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -13780,9 +13862,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -13836,9 +13919,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -13872,9 +13956,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -14030,9 +14115,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -14086,9 +14172,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -14122,9 +14209,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true 
inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -14282,9 +14370,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -14338,9 +14427,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -14374,9 +14464,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -14587,9 +14678,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -14650,9 +14742,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -14686,9 +14779,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -14864,9 +14958,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true 
inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -14927,9 +15022,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -15027,9 +15123,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -15090,9 +15187,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -15230,9 +15328,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -15293,9 +15392,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -15435,9 +15535,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -15498,9 +15599,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true 
inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -15640,9 +15742,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -15703,9 +15806,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -15848,9 +15952,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -15911,9 +16016,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -16051,9 +16157,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -16119,9 +16226,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -16259,9 +16367,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true 
inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -16327,9 +16436,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -16464,9 +16574,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -16524,9 +16635,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -16663,9 +16775,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -16726,9 +16839,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -16879,9 +16993,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -16943,9 +17058,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true 
inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -17092,9 +17208,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -17136,9 +17253,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -17199,9 +17317,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -17349,9 +17468,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -17412,9 +17532,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -17562,9 +17683,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -17625,9 +17747,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true 
inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -17688,9 +17811,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -17833,9 +17957,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -17869,9 +17994,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -17925,9 +18051,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -18063,9 +18190,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -18099,9 +18227,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -18155,9 +18284,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true 
inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -18313,9 +18443,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -18369,9 +18500,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -18405,9 +18537,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -18563,9 +18696,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -18619,9 +18753,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -18655,9 +18790,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -18815,9 +18951,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true 
inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -18871,9 +19008,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -18907,9 +19045,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -19120,9 +19259,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -19183,9 +19323,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -19219,9 +19360,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -19397,9 +19539,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -19460,9 +19603,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true 
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
diff --git ql/src/test/results/clientpositive/llap/vector_llap_text_1.q.out ql/src/test/results/clientpositive/llap/vector_llap_text_1.q.out
new file mode 100644
index 0000000..71201fa
--- /dev/null
+++ ql/src/test/results/clientpositive/llap/vector_llap_text_1.q.out
@@ -0,0 +1,353 @@
+PREHOOK: query: CREATE TABLE srcbucket_mapjoin(key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@srcbucket_mapjoin
+POSTHOOK: query: CREATE TABLE srcbucket_mapjoin(key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@srcbucket_mapjoin
+PREHOOK: query: CREATE TABLE tab_part (key int, value string) PARTITIONED BY(ds STRING) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tab_part
+POSTHOOK: query: CREATE TABLE tab_part (key int, value string) PARTITIONED BY(ds STRING) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@tab_part
+PREHOOK: query: CREATE TABLE srcbucket_mapjoin_part (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@srcbucket_mapjoin_part
+POSTHOOK: query: CREATE TABLE srcbucket_mapjoin_part (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@srcbucket_mapjoin_part
+PREHOOK: query: load data local inpath '../../data/files/bmj/000000_0' INTO TABLE srcbucket_mapjoin partition(ds='2008-04-08')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@srcbucket_mapjoin
+POSTHOOK: query: load data local inpath '../../data/files/bmj/000000_0' INTO TABLE srcbucket_mapjoin partition(ds='2008-04-08')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@srcbucket_mapjoin
+POSTHOOK: Output: default@srcbucket_mapjoin@ds=2008-04-08
+PREHOOK: query: load data local inpath '../../data/files/bmj1/000001_0' INTO TABLE srcbucket_mapjoin partition(ds='2008-04-08')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@srcbucket_mapjoin@ds=2008-04-08
+POSTHOOK: query: load data local inpath '../../data/files/bmj1/000001_0' INTO TABLE srcbucket_mapjoin partition(ds='2008-04-08')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@srcbucket_mapjoin@ds=2008-04-08
+PREHOOK: query: load data local inpath '../../data/files/bmj/000000_0' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@srcbucket_mapjoin_part
+POSTHOOK: query: load data local inpath '../../data/files/bmj/000000_0' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@srcbucket_mapjoin_part
+POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
+PREHOOK: query: load data local inpath '../../data/files/bmj/000001_0' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
+POSTHOOK: query: load data local inpath '../../data/files/bmj/000001_0' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
+PREHOOK: query: load data local inpath '../../data/files/bmj/000002_0' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
+POSTHOOK: query: load data local inpath '../../data/files/bmj/000002_0' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
+PREHOOK: query: load data local inpath '../../data/files/bmj/000003_0' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
+POSTHOOK: query: load data local inpath '../../data/files/bmj/000003_0' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
+PREHOOK: query: insert overwrite table tab_part partition (ds='2008-04-08')
+select key,value from srcbucket_mapjoin_part
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcbucket_mapjoin_part
+PREHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08
+PREHOOK: Output: default@tab_part@ds=2008-04-08
+POSTHOOK: query: insert overwrite table tab_part partition (ds='2008-04-08')
+select key,value from srcbucket_mapjoin_part
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcbucket_mapjoin_part
+POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08
+POSTHOOK: Output: default@tab_part@ds=2008-04-08
+POSTHOOK: Lineage: tab_part PARTITION(ds=2008-04-08).key SIMPLE [(srcbucket_mapjoin_part)srcbucket_mapjoin_part.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: tab_part PARTITION(ds=2008-04-08).value SIMPLE [(srcbucket_mapjoin_part)srcbucket_mapjoin_part.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: CREATE TABLE tab(key int, value string) PARTITIONED BY(ds STRING) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tab
+POSTHOOK: query: CREATE TABLE tab(key int, value string) PARTITIONED BY(ds STRING) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@tab
+PREHOOK: query: insert overwrite table tab partition (ds='2008-04-08')
+select key,value from srcbucket_mapjoin
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcbucket_mapjoin
+PREHOOK: Input: default@srcbucket_mapjoin@ds=2008-04-08
+PREHOOK: Output: default@tab@ds=2008-04-08
+POSTHOOK: query: insert overwrite table tab partition (ds='2008-04-08')
+select key,value from srcbucket_mapjoin
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcbucket_mapjoin
+POSTHOOK: Input: default@srcbucket_mapjoin@ds=2008-04-08
+POSTHOOK: Output: default@tab@ds=2008-04-08
+POSTHOOK: Lineage: tab PARTITION(ds=2008-04-08).key SIMPLE [(srcbucket_mapjoin)srcbucket_mapjoin.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: tab PARTITION(ds=2008-04-08).value SIMPLE [(srcbucket_mapjoin)srcbucket_mapjoin.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: explain vectorization detail
+select a.key, a.value, b.value
+from tab a join tab_part b on a.key = b.key
+order by a.key, a.value, b.value
+limit 10
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization detail
+select a.key, a.value, b.value
+from tab a join tab_part b on a.key = b.key
+order by a.key, a.value, b.value
+limit 10
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 2 <- Map 1 (CUSTOM_EDGE)
+        Reducer 3 <- Map 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: a
+                  Statistics: Num rows: 242 Data size: 22990 Basic stats: COMPLETE Column stats: COMPLETE
+                  TableScan Vectorization:
+                      native: true
+                      vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ds:string, 3:ROW__ID:struct]
+                  Filter Operator
+                    Filter Vectorization:
+                        className: VectorFilterOperator
+                        native: true
+                        predicateExpression: SelectColumnIsNotNull(col 0:int)
+                    predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 242 Data size: 22990 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: key (type: int), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Select Vectorization:
+                          className: VectorSelectOperator
+                          native: true
+                          projectedOutputColumnNums: [0, 1]
+                      Statistics: Num rows: 242 Data size: 22990 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: int)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: int)
+                        Reduce Sink Vectorization:
+                            className: VectorReduceSinkObjectHashOperator
+                            keyColumnNums: [0]
+                            native: true
+                            nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                            partitionColumnNums: [0]
+                            valueColumnNums: [1]
+                        Statistics: Num rows: 242 Data size: 22990 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col1 (type: string)
+            Execution mode: vectorized, llap
+            LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
+                featureSupportInUse: []
+                inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    includeColumns: [0, 1]
+                    dataColumns: key:int, value:string
+                    partitionColumnCount: 1
+                    partitionColumns: ds:string
+                    scratchColumnTypeNames: []
+        Map 2
+            Map Operator Tree:
+                TableScan
+                  alias: b
+                  Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
+                  TableScan Vectorization:
+                      native: true
+                      vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ds:string, 3:ROW__ID:struct]
+                  Filter Operator
+                    Filter Vectorization:
+                        className: VectorFilterOperator
+                        native: true
+                        predicateExpression: SelectColumnIsNotNull(col 0:int)
+                    predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: key (type: int), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Select Vectorization:
+                          className: VectorSelectOperator
+                          native: true
+                          projectedOutputColumnNums: [0, 1]
+                      Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
+                      Map Join Operator
+                        condition map:
+                             Inner Join 0 to 1
+                        keys:
+                          0 _col0 (type: int)
+                          1 _col0 (type: int)
+                        Map Join Vectorization:
+                            bigTableKeyColumnNums: [0]
+                            bigTableRetainedColumnNums: [0, 1]
+                            bigTableValueColumnNums: [1]
+                            className: VectorMapJoinInnerLongOperator
+                            native: true
+                            nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
+                            projectedOutputColumnNums: [0, 4, 1]
+                            smallTableMapping: [4]
+                        outputColumnNames: _col0, _col1, _col3
+                        input vertices:
+                          0 Map 1
+                        Statistics: Num rows: 391 Data size: 72726 Basic stats: COMPLETE Column stats: COMPLETE
+                        Select Operator
+                          expressions: _col0 (type: int), _col1 (type: string), _col3 (type: string)
+                          outputColumnNames: _col0, _col1, _col2
+                          Select Vectorization:
+                              className: VectorSelectOperator
+                              native: true
+                              projectedOutputColumnNums: [0, 4, 1]
+                          Statistics: Num rows: 391 Data size: 72726 Basic stats: COMPLETE Column stats: COMPLETE
+                          Reduce Output Operator
+                            key expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string)
+                            sort order: +++
+                            Reduce Sink Vectorization:
+                                className: VectorReduceSinkObjectHashOperator
+                                keyColumnNums: [0, 4, 1]
+                                native: true
+                                nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                                valueColumnNums: []
+                            Statistics: Num rows: 391 Data size: 72726 Basic stats: COMPLETE Column stats: COMPLETE
+                            TopN Hash Memory Usage: 0.1
+            Execution mode: vectorized, llap
+            LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
+                featureSupportInUse: []
+                inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    includeColumns: [0, 1]
+                    dataColumns: key:int, value:string
+                    partitionColumnCount: 1
+                    partitionColumns: ds:string
+                    scratchColumnTypeNames: [string]
+        Reducer 3
+            Execution mode: vectorized, llap
+            Reduce Vectorization:
+                enabled: true
+                enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+                reduceColumnNullOrder: aaa
+                reduceColumnSortOrder: +++
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 3
+                    dataColumns: KEY.reducesinkkey0:int, KEY.reducesinkkey1:string, KEY.reducesinkkey2:string
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string)
+                outputColumnNames: _col0, _col1, _col2
+                Select Vectorization:
+                    className: VectorSelectOperator
+                    native: true
+                    projectedOutputColumnNums: [0, 1, 2]
+                Statistics: Num rows: 391 Data size: 72726 Basic stats: COMPLETE Column stats: COMPLETE
+                Limit
+                  Number of rows: 10
+                  Limit Vectorization:
+                      className: VectorLimitOperator
+                      native: true
+                  Statistics: Num rows: 10 Data size: 1860 Basic stats: COMPLETE Column stats: COMPLETE
+                  File Output Operator
+                    compressed: false
+                    File Sink Vectorization:
+                        className: VectorFileSinkOperator
+                        native: false
+                    Statistics: Num rows: 10 Data size: 1860 Basic stats: COMPLETE Column stats: COMPLETE
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 10
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select a.key, a.value, b.value
+from tab a join tab_part b on a.key = b.key
+order by a.key, a.value, b.value
+limit 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tab
+PREHOOK: Input: default@tab@ds=2008-04-08
+PREHOOK: Input: default@tab_part
+PREHOOK: Input: default@tab_part@ds=2008-04-08
+#### A masked pattern was here ####
+POSTHOOK: query: select a.key, a.value, b.value
+from tab a join tab_part b on a.key = b.key
+order by a.key, a.value, b.value
+limit 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tab
+POSTHOOK: Input: default@tab@ds=2008-04-08
+POSTHOOK: Input: default@tab_part
+POSTHOOK: Input: default@tab_part@ds=2008-04-08
+#### A masked pattern was here ####
+0	val_0	val_0
+0	val_0	val_0
+0	val_0	val_0
+0	val_0	val_0
+0	val_0	val_0
+0	val_0	val_0
+0	val_0	val_0
+0	val_0	val_0
+0	val_0	val_0
+2	val_2	val_2
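The .q file that produces this new golden output is not itself visible in this part of the patch; only its expected output is. Reconstructed from the PREHOOK/POSTHOOK entries above, vector_llap_text_1.q is presumably close to the sketch below. The SET statements are assumptions inferred from the plan header (PLAN VECTORIZATION reports hive.vectorized.execution.enabled IS true, and Map Vectorization reports hive.vectorized.use.vector.serde.deserialize IS true); they are not copied from the source, and the real test file may set additional properties.

-- Hypothetical reconstruction of vector_llap_text_1.q (not part of this diff):
SET hive.vectorized.execution.enabled=true;            -- assumed, per PLAN VECTORIZATION above
SET hive.vectorized.use.vector.serde.deserialize=true; -- assumed, per enabledConditionsMet above

CREATE TABLE srcbucket_mapjoin(key int, value string) PARTITIONED BY (ds string)
  CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
CREATE TABLE tab_part (key int, value string) PARTITIONED BY (ds string)
  CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE;
CREATE TABLE srcbucket_mapjoin_part (key int, value string) PARTITIONED BY (ds string)
  CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE;

-- Bucket files loaded exactly as recorded in the PREHOOK entries above:
LOAD DATA LOCAL INPATH '../../data/files/bmj/000000_0'
  INTO TABLE srcbucket_mapjoin PARTITION (ds='2008-04-08');
-- ... remaining LOAD statements for srcbucket_mapjoin and srcbucket_mapjoin_part ...

INSERT OVERWRITE TABLE tab_part PARTITION (ds='2008-04-08')
SELECT key, value FROM srcbucket_mapjoin_part;

CREATE TABLE tab(key int, value string) PARTITIONED BY (ds string)
  CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
INSERT OVERWRITE TABLE tab PARTITION (ds='2008-04-08')
SELECT key, value FROM srcbucket_mapjoin;

EXPLAIN VECTORIZATION DETAIL
SELECT a.key, a.value, b.value
FROM tab a JOIN tab_part b ON a.key = b.key
ORDER BY a.key, a.value, b.value
LIMIT 10;

SELECT a.key, a.value, b.value
FROM tab a JOIN tab_part b ON a.key = b.key
ORDER BY a.key, a.value, b.value
LIMIT 10;

Note that both map vertices in the plan report vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] rather than VECTORIZED_INPUT_FILE_FORMAT: the text tables are row-deserialized into vectorized batches instead of being read through a vectorized input format, which is the behavior this test exercises.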
enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -341,7 +341,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -386,7 +386,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -442,7 +442,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_mr_diff_schema_alias.q.out ql/src/test/results/clientpositive/llap/vector_mr_diff_schema_alias.q.out index 7c1cbb6..b42bb81 100644 --- ql/src/test/results/clientpositive/llap/vector_mr_diff_schema_alias.q.out +++ ql/src/test/results/clientpositive/llap/vector_mr_diff_schema_alias.q.out @@ -275,6 +275,7 @@ STAGE PLANS: enabled: true inputFormatFeatureSupport: [] featureSupportInUse: [] + inputFileFormats: [] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -302,7 +303,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -331,7 +332,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_multi_insert.q.out ql/src/test/results/clientpositive/llap/vector_multi_insert.q.out index 4bf6a03..4248547 100644 --- ql/src/test/results/clientpositive/llap/vector_multi_insert.q.out +++ ql/src/test/results/clientpositive/llap/vector_multi_insert.q.out @@ -151,7 +151,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_null_projection.q.out ql/src/test/results/clientpositive/llap/vector_null_projection.q.out index fc13ce1..5ca40ed 100644 --- ql/src/test/results/clientpositive/llap/vector_null_projection.q.out +++ 
ql/src/test/results/clientpositive/llap/vector_null_projection.q.out @@ -72,7 +72,7 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] notVectorizedReason: Select expression for SELECT operator: Vectorizing data type void not supported when mode = PROJECTION vectorized: false @@ -139,7 +139,7 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] notVectorizedReason: Key expression for GROUPBY operator: Vectorizing data type void not supported when mode = PROJECTION vectorized: false Map 4 @@ -166,7 +166,7 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] notVectorizedReason: Key expression for GROUPBY operator: Vectorizing data type void not supported when mode = PROJECTION vectorized: false Reducer 3 diff --git ql/src/test/results/clientpositive/llap/vector_nullsafe_join.q.out ql/src/test/results/clientpositive/llap/vector_nullsafe_join.q.out index 9801470..a58c9b2 100644 --- ql/src/test/results/clientpositive/llap/vector_nullsafe_join.q.out +++ ql/src/test/results/clientpositive/llap/vector_nullsafe_join.q.out @@ -96,7 +96,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -124,7 +124,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -233,7 +233,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -268,7 +268,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -303,7 +303,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -397,7 +397,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format 
IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -425,7 +425,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -453,7 +453,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -581,7 +581,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -615,7 +615,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -649,7 +649,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -743,7 +743,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -770,7 +770,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -797,7 +797,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -989,7 +989,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -1017,7 +1017,7 @@ STAGE PLANS: 
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1126,7 +1126,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1161,7 +1161,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1196,7 +1196,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1290,7 +1290,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1318,7 +1318,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1346,7 +1346,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1474,7 +1474,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1508,7 +1508,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1542,7 +1542,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1636,7 +1636,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1663,7 +1663,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1690,7 +1690,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
diff --git ql/src/test/results/clientpositive/llap/vector_number_compare_projection.q.out ql/src/test/results/clientpositive/llap/vector_number_compare_projection.q.out
index cd2e1e4..ec4c230 100644
--- ql/src/test/results/clientpositive/llap/vector_number_compare_projection.q.out
+++ ql/src/test/results/clientpositive/llap/vector_number_compare_projection.q.out
@@ -166,7 +166,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: true
                 vectorized: true
@@ -292,7 +292,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: true
                 vectorized: true
diff --git ql/src/test/results/clientpositive/llap/vector_nvl.q.out ql/src/test/results/clientpositive/llap/vector_nvl.q.out
index 837a574..5261176 100644
--- ql/src/test/results/clientpositive/llap/vector_nvl.q.out
+++ ql/src/test/results/clientpositive/llap/vector_nvl.q.out
@@ -67,7 +67,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -162,7 +162,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -255,7 +255,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -333,7 +333,7 @@ STAGE PLANS:
             Map Vectorization:
                 enabled: true
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 notVectorizedReason: Select expression for SELECT operator: Vectorizing data type void not supported when mode = PROJECTION
                 vectorized: false
diff --git ql/src/test/results/clientpositive/llap/vector_orderby_5.q.out ql/src/test/results/clientpositive/llap/vector_orderby_5.q.out
index e4bc4f0..fce9c4d 100644
--- ql/src/test/results/clientpositive/llap/vector_orderby_5.q.out
+++ ql/src/test/results/clientpositive/llap/vector_orderby_5.q.out
@@ -170,7 +170,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
diff --git ql/src/test/results/clientpositive/llap/vector_outer_join0.q.out ql/src/test/results/clientpositive/llap/vector_outer_join0.q.out
index 3376b4c..7f88513 100644
--- ql/src/test/results/clientpositive/llap/vector_outer_join0.q.out
+++ ql/src/test/results/clientpositive/llap/vector_outer_join0.q.out
@@ -137,9 +137,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -182,9 +183,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -272,9 +274,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
@@ -335,9 +338,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
diff --git ql/src/test/results/clientpositive/llap/vector_outer_join1.q.out ql/src/test/results/clientpositive/llap/vector_outer_join1.q.out
index c0885c7..68a75da 100644
--- ql/src/test/results/clientpositive/llap/vector_outer_join1.q.out
+++ ql/src/test/results/clientpositive/llap/vector_outer_join1.q.out
@@ -297,9 +297,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 12
@@ -342,9 +343,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 12
@@ -471,9 +473,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 12
@@ -515,9 +518,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 12
@@ -767,9 +771,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 12
@@ -811,9 +816,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 12
@@ -855,9 +861,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 12
diff --git ql/src/test/results/clientpositive/llap/vector_outer_join2.q.out ql/src/test/results/clientpositive/llap/vector_outer_join2.q.out
index e0f429e..614931e 100644
--- ql/src/test/results/clientpositive/llap/vector_outer_join2.q.out
+++ ql/src/test/results/clientpositive/llap/vector_outer_join2.q.out
@@ -342,9 +342,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 12
@@ -386,9 +387,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 12
@@ -430,9 +432,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 12
diff --git ql/src/test/results/clientpositive/llap/vector_partition_diff_num_cols.q.out ql/src/test/results/clientpositive/llap/vector_partition_diff_num_cols.q.out
index 068453f..89b2db7 100644
--- ql/src/test/results/clientpositive/llap/vector_partition_diff_num_cols.q.out
+++ ql/src/test/results/clientpositive/llap/vector_partition_diff_num_cols.q.out
@@ -136,7 +136,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -306,7 +306,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -476,7 +476,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -633,7 +633,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -790,7 +790,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
diff --git ql/src/test/results/clientpositive/llap/vector_partitioned_date_time.q.out ql/src/test/results/clientpositive/llap/vector_partitioned_date_time.q.out
index 687b4af..f2dd854 100644
--- ql/src/test/results/clientpositive/llap/vector_partitioned_date_time.q.out
+++ ql/src/test/results/clientpositive/llap/vector_partitioned_date_time.q.out
@@ -299,7 +299,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -477,7 +477,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -971,7 +971,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1206,7 +1206,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1408,7 +1408,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -1926,7 +1926,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -2161,7 +2161,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -2363,7 +2363,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -2724,7 +2724,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -2902,7 +2902,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -3396,7 +3396,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -3631,7 +3631,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -3833,7 +3833,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -4351,7 +4351,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -4586,7 +4586,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -4788,7 +4788,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
diff --git ql/src/test/results/clientpositive/llap/vector_ptf_part_simple.q.out ql/src/test/results/clientpositive/llap/vector_ptf_part_simple.q.out
index 5eaed53..9da4608 100644
--- ql/src/test/results/clientpositive/llap/vector_ptf_part_simple.q.out
+++ ql/src/test/results/clientpositive/llap/vector_ptf_part_simple.q.out
@@ -155,9 +155,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 3
@@ -415,9 +416,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 3
@@ -644,9 +646,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 3
@@ -874,9 +877,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 3
@@ -1136,9 +1140,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 3
@@ -1366,9 +1371,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 3
@@ -1597,9 +1603,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 3
@@ -1860,9 +1867,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 3
@@ -2091,9 +2099,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 3
@@ -2314,9 +2323,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 3
@@ -2542,9 +2552,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 3
@@ -2770,9 +2781,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 3
@@ -2968,9 +2980,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 3
@@ -3198,9 +3211,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 3
@@ -3428,9 +3442,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 3
@@ -3627,9 +3642,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 3
@@ -3858,9 +3874,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 3
@@ -4089,9 +4106,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 3
@@ -4328,9 +4346,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 3
@@ -4557,9 +4576,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 3
@@ -4806,9 +4826,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 3
@@ -5035,9 +5056,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 3
@@ -5258,9 +5280,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 3
@@ -5458,9 +5481,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 3
@@ -5659,9 +5683,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 3
@@ -5829,9 +5854,10 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 3
diff --git ql/src/test/results/clientpositive/llap/vector_reduce1.q.out ql/src/test/results/clientpositive/llap/vector_reduce1.q.out
index c4a6ae6..0144c0a 100644
--- ql/src/test/results/clientpositive/llap/vector_reduce1.q.out
+++ ql/src/test/results/clientpositive/llap/vector_reduce1.q.out
@@ -153,7 +153,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
diff --git ql/src/test/results/clientpositive/llap/vector_reduce2.q.out ql/src/test/results/clientpositive/llap/vector_reduce2.q.out
index 03c8bc8..1a3613c 100644
--- ql/src/test/results/clientpositive/llap/vector_reduce2.q.out
+++ ql/src/test/results/clientpositive/llap/vector_reduce2.q.out
@@ -153,7 +153,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
diff --git ql/src/test/results/clientpositive/llap/vector_reduce3.q.out ql/src/test/results/clientpositive/llap/vector_reduce3.q.out
index a6a1f56..1da7aea 100644
--- ql/src/test/results/clientpositive/llap/vector_reduce3.q.out
+++ ql/src/test/results/clientpositive/llap/vector_reduce3.q.out
@@ -153,7 +153,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
diff --git ql/src/test/results/clientpositive/llap/vector_reduce_groupby_decimal.q.out ql/src/test/results/clientpositive/llap/vector_reduce_groupby_decimal.q.out
index 3f92327..7cbce33 100644
--- ql/src/test/results/clientpositive/llap/vector_reduce_groupby_decimal.q.out
+++ ql/src/test/results/clientpositive/llap/vector_reduce_groupby_decimal.q.out
@@ -91,7 +91,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
diff --git ql/src/test/results/clientpositive/llap/vector_reduce_groupby_duplicate_cols.q.out ql/src/test/results/clientpositive/llap/vector_reduce_groupby_duplicate_cols.q.out
index 07c1e41..ad3e321 100644
--- ql/src/test/results/clientpositive/llap/vector_reduce_groupby_duplicate_cols.q.out
+++ ql/src/test/results/clientpositive/llap/vector_reduce_groupby_duplicate_cols.q.out
@@ -128,9 +128,10 @@ STAGE PLANS:
                 inputFormatFeatureSupport: [DECIMAL_64]
                 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
+                vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
                 vectorized: true
                 rowBatchContext:
                     dataColumnCount: 2
diff --git ql/src/test/results/clientpositive/llap/vector_string_concat.q.out ql/src/test/results/clientpositive/llap/vector_string_concat.q.out
index 2ceef58..1d03761 100644
--- ql/src/test/results/clientpositive/llap/vector_string_concat.q.out
+++ ql/src/test/results/clientpositive/llap/vector_string_concat.q.out
@@ -157,7 +157,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -381,7 +381,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
diff --git ql/src/test/results/clientpositive/llap/vector_struct_in.q.out ql/src/test/results/clientpositive/llap/vector_struct_in.q.out
index 4af1015..e39b9cf 100644
--- ql/src/test/results/clientpositive/llap/vector_struct_in.q.out
+++ ql/src/test/results/clientpositive/llap/vector_struct_in.q.out
@@ -96,7 +96,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -213,7 +213,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -354,7 +354,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -471,7 +471,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -612,7 +612,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -729,7 +729,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -873,7 +873,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
@@ -993,7 +993,7 @@ STAGE PLANS:
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
                 allNative: false
                 usesVectorUDFAdaptor: false
vectorized: true diff --git ql/src/test/results/clientpositive/llap/vector_udf1.q.out ql/src/test/results/clientpositive/llap/vector_udf1.q.out index 2969723..ed3b6fb 100644 --- ql/src/test/results/clientpositive/llap/vector_udf1.q.out +++ ql/src/test/results/clientpositive/llap/vector_udf1.q.out @@ -97,9 +97,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 8 @@ -198,9 +199,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 8 @@ -299,9 +301,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 8 @@ -400,9 +403,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: true + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 8 @@ -501,9 +505,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: true + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 8 @@ -602,9 +607,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: true + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 8 @@ -703,9 +709,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + 
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: true
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 8
@@ -804,9 +811,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: true
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 8
@@ -905,9 +913,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: true
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 8
@@ -1006,9 +1015,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: true
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 8
@@ -1107,9 +1117,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: true
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 8
@@ -1208,9 +1219,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 8
@@ -1309,9 +1321,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 8
@@ -1410,9 +1423,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: true
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 8
@@ -1511,9 +1525,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 8
@@ -1612,9 +1627,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: true
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 8
@@ -1713,9 +1729,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: true
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 8
@@ -1814,9 +1831,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: true
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 8
@@ -1915,9 +1933,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: true
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 8
@@ -2016,9 +2035,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: true
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 8
@@ -2117,9 +2137,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 8
@@ -2216,9 +2237,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: true
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 8
@@ -2313,9 +2335,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: true
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 8
@@ -2410,9 +2433,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: true
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 8
@@ -2509,9 +2533,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 8
@@ -2610,9 +2635,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 8
@@ -2695,8 +2721,9 @@ STAGE PLANS:
 Map Vectorization:
 enabled: true
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 notVectorizedReason: Aggregation Function expression for GROUPBY operator: UDF compute_stats not supported
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: false
 Reducer 2
 Execution mode: llap
@@ -2813,9 +2840,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 8
@@ -2956,9 +2984,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 8
diff --git ql/src/test/results/clientpositive/llap/vector_varchar_4.q.out ql/src/test/results/clientpositive/llap/vector_varchar_4.q.out
index c04cc61..ac42548 100644
--- ql/src/test/results/clientpositive/llap/vector_varchar_4.q.out
+++ ql/src/test/results/clientpositive/llap/vector_varchar_4.q.out
@@ -176,7 +176,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/llap/vector_varchar_mapjoin1.q.out ql/src/test/results/clientpositive/llap/vector_varchar_mapjoin1.q.out
index 117246e..b4fce6c 100644
--- ql/src/test/results/clientpositive/llap/vector_varchar_mapjoin1.q.out
+++ ql/src/test/results/clientpositive/llap/vector_varchar_mapjoin1.q.out
@@ -179,7 +179,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -208,7 +208,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -296,7 +296,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -334,7 +334,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -424,7 +424,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -462,7 +462,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/llap/vector_varchar_simple.q.out ql/src/test/results/clientpositive/llap/vector_varchar_simple.q.out
index 6d2d728..dbae7ef 100644
--- ql/src/test/results/clientpositive/llap/vector_varchar_simple.q.out
+++ ql/src/test/results/clientpositive/llap/vector_varchar_simple.q.out
@@ -93,7 +93,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -213,7 +213,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -347,7 +347,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/llap/vector_when_case_null.q.out ql/src/test/results/clientpositive/llap/vector_when_case_null.q.out
index 8d3f163..4c2fd05 100644
--- ql/src/test/results/clientpositive/llap/vector_when_case_null.q.out
+++ ql/src/test/results/clientpositive/llap/vector_when_case_null.q.out
@@ -85,7 +85,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/llap/vector_windowing.q.out ql/src/test/results/clientpositive/llap/vector_windowing.q.out
index 8dfee97..2edf054 100644
--- ql/src/test/results/clientpositive/llap/vector_windowing.q.out
+++ ql/src/test/results/clientpositive/llap/vector_windowing.q.out
@@ -58,9 +58,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -261,9 +262,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -469,9 +471,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -648,9 +651,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -843,9 +847,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -1045,9 +1050,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -1253,9 +1259,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -1298,9 +1305,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -1539,9 +1547,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -1699,9 +1708,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -1876,9 +1886,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -2055,9 +2066,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -2246,9 +2258,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -2413,9 +2426,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -2580,9 +2594,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -2800,9 +2815,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -3092,9 +3108,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -3371,9 +3388,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -3624,9 +3642,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -3838,9 +3857,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -4071,9 +4091,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -4274,9 +4295,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -4542,9 +4564,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -4847,9 +4870,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -5020,8 +5044,9 @@ STAGE PLANS:
 Map Vectorization:
 enabled: true
 enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 notVectorizedReason: Lateral View Forward (LATERALVIEWFORWARD) not supported
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: false
 Reducer 2
 Execution mode: llap
@@ -5362,9 +5387,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -6119,9 +6145,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -6302,9 +6329,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -6467,9 +6495,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -6624,9 +6653,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -6787,9 +6817,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -6960,9 +6991,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -7127,9 +7159,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -7304,9 +7337,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -7485,9 +7519,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -7665,9 +7700,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -7863,9 +7899,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -8059,9 +8096,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -8283,9 +8321,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -8524,9 +8563,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -8713,9 +8753,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -8868,9 +8909,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -9056,9 +9098,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -9211,9 +9254,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -9370,9 +9414,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -9603,9 +9648,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -9744,9 +9790,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -9893,9 +9940,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
diff --git ql/src/test/results/clientpositive/llap/vector_windowing_expressions.q.out ql/src/test/results/clientpositive/llap/vector_windowing_expressions.q.out
index 7e9a564..cada29e 100644
--- ql/src/test/results/clientpositive/llap/vector_windowing_expressions.q.out
+++ ql/src/test/results/clientpositive/llap/vector_windowing_expressions.q.out
@@ -104,9 +104,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -298,9 +299,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -459,9 +461,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 11
@@ -683,9 +686,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 11
@@ -907,9 +911,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 11
@@ -1132,9 +1137,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 11
@@ -1356,9 +1362,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -1535,9 +1542,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
@@ -1702,9 +1710,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 11
@@ -1946,9 +1955,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 9
diff --git ql/src/test/results/clientpositive/llap/vector_windowing_gby.q.out ql/src/test/results/clientpositive/llap/vector_windowing_gby.q.out
index 74997d3..7ffd9ad 100644
--- ql/src/test/results/clientpositive/llap/vector_windowing_gby.q.out
+++ ql/src/test/results/clientpositive/llap/vector_windowing_gby.q.out
@@ -72,9 +72,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 5
@@ -124,9 +125,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 12
diff --git ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out
index c80fefa..5af70fd 100644
--- ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out
+++ ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out
@@ -76,9 +76,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 5
@@ -319,9 +320,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 5
@@ -556,9 +558,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 5
@@ -915,9 +918,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 5
@@ -967,9 +971,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 12
diff --git ql/src/test/results/clientpositive/llap/vector_windowing_multipartitioning.q.out ql/src/test/results/clientpositive/llap/vector_windowing_multipartitioning.q.out
index d215930..184e58d 100644
--- ql/src/test/results/clientpositive/llap/vector_windowing_multipartitioning.q.out
+++ ql/src/test/results/clientpositive/llap/vector_windowing_multipartitioning.q.out
@@ -96,9 +96,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 11
@@ -10271,9 +10272,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 11
@@ -10548,9 +10550,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 11
@@ -10816,9 +10819,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 11
@@ -11088,9 +11092,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 11
@@ -11369,9 +11374,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 11
diff --git ql/src/test/results/clientpositive/llap/vector_windowing_navfn.q.out ql/src/test/results/clientpositive/llap/vector_windowing_navfn.q.out
index 8dc3fa7..07a2677 100644
--- ql/src/test/results/clientpositive/llap/vector_windowing_navfn.q.out
+++ ql/src/test/results/clientpositive/llap/vector_windowing_navfn.q.out
@@ -102,9 +102,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 2
@@ -248,9 +249,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 11
@@ -507,9 +509,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 11
@@ -731,9 +734,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 11
@@ -956,9 +960,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 11
@@ -1214,9 +1219,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 11
@@ -1448,9 +1454,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 11
@@ -1641,9 +1648,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 2
@@ -1814,9 +1822,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 2
@@ -1987,9 +1996,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 2
@@ -2160,9 +2170,10 @@ STAGE PLANS:
 inputFormatFeatureSupport: [DECIMAL_64]
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
 allNative: true
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
 vectorized: true
 rowBatchContext:
 dataColumnCount: 2
diff --git ql/src/test/results/clientpositive/llap/vector_windowing_order_null.q.out ql/src/test/results/clientpositive/llap/vector_windowing_order_null.q.out
index 8aa904f..8eebae5 100644
ql/src/test/results/clientpositive/llap/vector_windowing_order_null.q.out +++ ql/src/test/results/clientpositive/llap/vector_windowing_order_null.q.out @@ -103,9 +103,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -236,9 +237,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -370,9 +372,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -503,9 +506,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -637,9 +641,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -807,9 +812,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -972,9 +978,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: 
[(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -1137,9 +1144,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 diff --git ql/src/test/results/clientpositive/llap/vector_windowing_range_multiorder.q.out ql/src/test/results/clientpositive/llap/vector_windowing_range_multiorder.q.out index bf2591c..0c6db81 100644 --- ql/src/test/results/clientpositive/llap/vector_windowing_range_multiorder.q.out +++ ql/src/test/results/clientpositive/llap/vector_windowing_range_multiorder.q.out @@ -96,9 +96,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -354,9 +355,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -577,9 +579,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -800,9 +803,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -10950,9 +10954,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -11208,9 +11213,10 @@ 
STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -11466,9 +11472,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -11724,9 +11731,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -11982,9 +11990,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -12208,9 +12217,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -12434,9 +12444,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 diff --git ql/src/test/results/clientpositive/llap/vector_windowing_rank.q.out ql/src/test/results/clientpositive/llap/vector_windowing_rank.q.out index aa47b1b..2710b40 100644 --- ql/src/test/results/clientpositive/llap/vector_windowing_rank.q.out +++ ql/src/test/results/clientpositive/llap/vector_windowing_rank.q.out @@ -96,9 +96,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: 
org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -355,9 +356,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -579,9 +581,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -804,9 +807,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -1064,9 +1068,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -1116,9 +1121,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -1365,9 +1371,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -1417,9 +1424,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: 
org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -1668,9 +1676,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -1720,9 +1729,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 diff --git ql/src/test/results/clientpositive/llap/vector_windowing_streaming.q.out ql/src/test/results/clientpositive/llap/vector_windowing_streaming.q.out index 2c54027..d98c6f5 100644 --- ql/src/test/results/clientpositive/llap/vector_windowing_streaming.q.out +++ ql/src/test/results/clientpositive/llap/vector_windowing_streaming.q.out @@ -97,9 +97,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 9 @@ -244,9 +245,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 9 @@ -452,9 +454,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -855,9 +858,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: 
[(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 diff --git ql/src/test/results/clientpositive/llap/vector_windowing_windowspec.q.out ql/src/test/results/clientpositive/llap/vector_windowing_windowspec.q.out index 2d48bd5..4093d2d 100644 --- ql/src/test/results/clientpositive/llap/vector_windowing_windowspec.q.out +++ ql/src/test/results/clientpositive/llap/vector_windowing_windowspec.q.out @@ -95,9 +95,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -318,9 +319,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -542,9 +544,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -765,9 +768,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -988,9 +992,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -1212,9 +1217,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -1470,9 +1476,10 @@ STAGE PLANS: 
inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -1728,9 +1735,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -1952,9 +1960,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -2119,9 +2128,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 @@ -2286,9 +2296,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 11 diff --git ql/src/test/results/clientpositive/llap/vector_windowing_windowspec4.q.out ql/src/test/results/clientpositive/llap/vector_windowing_windowspec4.q.out index 07614ac..cc969f4 100644 --- ql/src/test/results/clientpositive/llap/vector_windowing_windowspec4.q.out +++ ql/src/test/results/clientpositive/llap/vector_windowing_windowspec4.q.out @@ -94,9 +94,10 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 diff --git ql/src/test/results/clientpositive/llap/vectorization_0.q.out ql/src/test/results/clientpositive/llap/vectorization_0.q.out index 2333716..cd7fba0 100644 --- 
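
The new vectorPartitionDescs entries come in two shapes in these files: (VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE) for the text tables and (VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat) for ORC. A rough model of that tuple — read strategy, input format, and, for deserialize-based reads, the serde kind — follows; the types below are a sketch, not Hive's actual VectorPartitionDesc:

    import java.util.Optional;

    public class VectorPartitionDescSketch {
        enum ReadType { VECTORIZED_INPUT_FILE_FORMAT, VECTOR_DESERIALIZE }

        record Desc(ReadType readType, String inputFormat, Optional<String> deserializerKind) {
            @Override
            public String toString() {
                return "(" + readType + ", " + inputFormat
                    + deserializerKind.map(k -> ", " + k).orElse("") + ")";
            }
        }

        public static void main(String[] args) {
            // Text partitions are deserialized row-by-row into batch columns,
            // so the serde kind (LAZY_SIMPLE) is part of the descriptor...
            System.out.println(new Desc(ReadType.VECTOR_DESERIALIZE,
                "org.apache.hadoop.mapred.TextInputFormat", Optional.of("LAZY_SIMPLE")));
            // ...while ORC hands back vectorized batches directly and prints no serde kind.
            System.out.println(new Desc(ReadType.VECTORIZED_INPUT_FILE_FORMAT,
                "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat", Optional.empty()));
        }
    }

The distinction matches the surrounding plans: the ORC plans cite hive.vectorized.use.vectorized.input.format IS true under enabledConditionsMet, while the text plans reach vectorization through vectorized deserialization of LAZY_SIMPLE rows.
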
ql/src/test/results/clientpositive/llap/vectorization_0.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_0.q.out @@ -76,9 +76,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 @@ -257,9 +258,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 @@ -427,7 +429,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -584,9 +586,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 @@ -765,9 +768,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 @@ -935,7 +939,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -1092,9 +1096,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 @@ -1273,9 +1278,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - 
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 @@ -1443,7 +1449,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -1645,9 +1651,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 diff --git ql/src/test/results/clientpositive/llap/vectorization_1.q.out ql/src/test/results/clientpositive/llap/vectorization_1.q.out index 278bd0c..2f57b43 100644 --- ql/src/test/results/clientpositive/llap/vectorization_1.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_1.q.out @@ -108,9 +108,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 diff --git ql/src/test/results/clientpositive/llap/vectorization_10.q.out ql/src/test/results/clientpositive/llap/vectorization_10.q.out index b6c68fb..3bae4f3 100644 --- ql/src/test/results/clientpositive/llap/vectorization_10.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_10.q.out @@ -100,9 +100,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 diff --git ql/src/test/results/clientpositive/llap/vectorization_11.q.out ql/src/test/results/clientpositive/llap/vectorization_11.q.out index bb0feec..91d6e8e 100644 --- ql/src/test/results/clientpositive/llap/vectorization_11.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_11.q.out @@ -82,9 +82,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 diff --git ql/src/test/results/clientpositive/llap/vectorization_12.q.out ql/src/test/results/clientpositive/llap/vectorization_12.q.out index 1285b25..6b236d1 100644 --- ql/src/test/results/clientpositive/llap/vectorization_12.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_12.q.out @@ -135,9 +135,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 diff --git ql/src/test/results/clientpositive/llap/vectorization_13.q.out ql/src/test/results/clientpositive/llap/vectorization_13.q.out index e50f3e2..fb420f0 100644 --- ql/src/test/results/clientpositive/llap/vectorization_13.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_13.q.out @@ -137,9 +137,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 @@ -488,7 +489,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vectorization_14.q.out ql/src/test/results/clientpositive/llap/vectorization_14.q.out index 0f77070..ae2e168 100644 --- ql/src/test/results/clientpositive/llap/vectorization_14.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_14.q.out @@ -138,9 +138,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 diff --git ql/src/test/results/clientpositive/llap/vectorization_15.q.out ql/src/test/results/clientpositive/llap/vectorization_15.q.out index ae4fe18..d78ec2b 100644 --- ql/src/test/results/clientpositive/llap/vectorization_15.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_15.q.out @@ -133,9 +133,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: 
[(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 diff --git ql/src/test/results/clientpositive/llap/vectorization_16.q.out ql/src/test/results/clientpositive/llap/vectorization_16.q.out index 7cf60ae..0d47419 100644 --- ql/src/test/results/clientpositive/llap/vectorization_16.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_16.q.out @@ -110,9 +110,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 diff --git ql/src/test/results/clientpositive/llap/vectorization_17.q.out ql/src/test/results/clientpositive/llap/vectorization_17.q.out index bdcb1eb..c77a70e 100644 --- ql/src/test/results/clientpositive/llap/vectorization_17.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_17.q.out @@ -104,9 +104,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 diff --git ql/src/test/results/clientpositive/llap/vectorization_2.q.out ql/src/test/results/clientpositive/llap/vectorization_2.q.out index 4dfb73d..87918d9 100644 --- ql/src/test/results/clientpositive/llap/vectorization_2.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_2.q.out @@ -112,9 +112,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 diff --git ql/src/test/results/clientpositive/llap/vectorization_3.q.out ql/src/test/results/clientpositive/llap/vectorization_3.q.out index 6bff739..9070bf8 100644 --- ql/src/test/results/clientpositive/llap/vectorization_3.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_3.q.out @@ -117,9 +117,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 diff --git ql/src/test/results/clientpositive/llap/vectorization_4.q.out ql/src/test/results/clientpositive/llap/vectorization_4.q.out index a38c77c..062d1dd 100644 --- 
ql/src/test/results/clientpositive/llap/vectorization_4.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_4.q.out @@ -112,9 +112,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 diff --git ql/src/test/results/clientpositive/llap/vectorization_5.q.out ql/src/test/results/clientpositive/llap/vectorization_5.q.out index d41de01..8760d54 100644 --- ql/src/test/results/clientpositive/llap/vectorization_5.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_5.q.out @@ -106,9 +106,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 diff --git ql/src/test/results/clientpositive/llap/vectorization_6.q.out ql/src/test/results/clientpositive/llap/vectorization_6.q.out index 84b9260..55dc76d 100644 --- ql/src/test/results/clientpositive/llap/vectorization_6.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_6.q.out @@ -94,9 +94,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 diff --git ql/src/test/results/clientpositive/llap/vectorization_7.q.out ql/src/test/results/clientpositive/llap/vectorization_7.q.out index 3c75229..0e9ffe9 100644 --- ql/src/test/results/clientpositive/llap/vectorization_7.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_7.q.out @@ -110,9 +110,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 @@ -357,7 +358,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vectorization_8.q.out ql/src/test/results/clientpositive/llap/vectorization_8.q.out index 22a1b34..7870a88 
100644 --- ql/src/test/results/clientpositive/llap/vectorization_8.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_8.q.out @@ -106,9 +106,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 @@ -340,7 +341,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vectorization_9.q.out ql/src/test/results/clientpositive/llap/vectorization_9.q.out index 7cf60ae..0d47419 100644 --- ql/src/test/results/clientpositive/llap/vectorization_9.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_9.q.out @@ -110,9 +110,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 diff --git ql/src/test/results/clientpositive/llap/vectorization_decimal_date.q.out ql/src/test/results/clientpositive/llap/vectorization_decimal_date.q.out index c83d5e6..d5b8937 100644 --- ql/src/test/results/clientpositive/llap/vectorization_decimal_date.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_decimal_date.q.out @@ -74,7 +74,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vectorization_input_format_excludes.q.out ql/src/test/results/clientpositive/llap/vectorization_input_format_excludes.q.out index 516b1c4..91fd5cf 100644 --- ql/src/test/results/clientpositive/llap/vectorization_input_format_excludes.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_input_format_excludes.q.out @@ -98,7 +98,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -195,7 +195,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat + inputFileFormats: 
[org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -427,8 +427,8 @@ STAGE PLANS: LLAP IO: all inputs (cache only) Map Vectorization: enabled: false - enabledConditionsNotMet: hive.vectorized.use.row.serde.deserialize IS true AND hive.vectorized.row.serde.inputformat.excludes NOT CONTAINS org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat IS false, hive.vectorized.use.vectorized.input.format IS true AND hive.vectorized.input.format.excludes NOT CONTAINS org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat IS false - inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat + enabledConditionsNotMet: Row deserialization of vectorized input format not supported IS false, hive.vectorized.use.vectorized.input.format IS true AND hive.vectorized.input.format.excludes NOT CONTAINS org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat IS false + inputFileFormats: [org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat] Stage: Stage-0 Fetch Operator @@ -519,8 +519,8 @@ STAGE PLANS: LLAP IO: all inputs (cache only) Map Vectorization: enabled: false - enabledConditionsNotMet: hive.vectorized.use.row.serde.deserialize IS true AND hive.vectorized.row.serde.inputformat.excludes NOT CONTAINS org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat IS false, hive.vectorized.use.vectorized.input.format IS true AND hive.vectorized.input.format.excludes NOT CONTAINS org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat IS false - inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat + enabledConditionsNotMet: Row deserialization of vectorized input format not supported IS false, hive.vectorized.use.vectorized.input.format IS true AND hive.vectorized.input.format.excludes NOT CONTAINS org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat IS false + inputFileFormats: [org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -752,7 +752,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -849,7 +849,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -1130,7 +1130,7 @@ STAGE PLANS: Map Vectorization: enabled: false enabledConditionsNotMet: hive.vectorized.use.vectorized.input.format IS true AND hive.vectorized.input.format.excludes NOT CONTAINS org.apache.hadoop.hive.ql.io.orc.OrcInputFormat IS false - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] Stage: Stage-0 Fetch Operator @@ -1222,7 +1222,7 @@ STAGE PLANS: Map Vectorization: enabled: false enabledConditionsNotMet: hive.vectorized.use.vectorized.input.format IS true AND hive.vectorized.input.format.excludes NOT CONTAINS org.apache.hadoop.hive.ql.io.orc.OrcInputFormat IS false - 
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: diff --git ql/src/test/results/clientpositive/llap/vectorization_part_project.q.out ql/src/test/results/clientpositive/llap/vectorization_part_project.q.out index e46c7f4..b282967 100644 --- ql/src/test/results/clientpositive/llap/vectorization_part_project.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_part_project.q.out @@ -70,15 +70,15 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc_part - Statistics: Num rows: 200 Data size: 1592 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 200 Data size: 1600 Basic stats: COMPLETE Column stats: PARTIAL Select Operator expressions: (cdouble + 2.0) (type: double) outputColumnNames: _col0 - Statistics: Num rows: 200 Data size: 1600 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 200 Data size: 1600 Basic stats: COMPLETE Column stats: PARTIAL Reduce Output Operator key expressions: _col0 (type: double) sort order: + - Statistics: Num rows: 200 Data size: 1600 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 200 Data size: 1600 Basic stats: COMPLETE Column stats: PARTIAL TopN Hash Memory Usage: 0.1 Execution mode: vectorized, llap LLAP IO: all inputs @@ -87,7 +87,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -103,13 +103,13 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: double) outputColumnNames: _col0 - Statistics: Num rows: 200 Data size: 1600 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 200 Data size: 1600 Basic stats: COMPLETE Column stats: PARTIAL Limit Number of rows: 10 - Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: PARTIAL File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: PARTIAL table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git ql/src/test/results/clientpositive/llap/vectorization_pushdown.q.out ql/src/test/results/clientpositive/llap/vectorization_pushdown.q.out index ca2aa87..d548da8 100644 --- ql/src/test/results/clientpositive/llap/vectorization_pushdown.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_pushdown.q.out @@ -48,7 +48,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out index 0027ab5..bdb70e8 100644 --- 
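
Besides the bracket change, the vectorization_input_format_excludes hunks above reword one failing precondition for the Parquet row-serde case to the plainer "Row deserialization of vectorized input format not supported". A rough sketch of how such named preconditions could be evaluated and echoed into the plan, with failures suffixed "IS false" as in the golden output — an illustration only, not Hive's actual Vectorizer code:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.function.BooleanSupplier;

    public class EnabledConditionsSketch {
        record Condition(String description, BooleanSupplier check) {}

        public static void main(String[] args) {
            List<Condition> conditions = List.of(
                new Condition("hive.vectorized.use.vectorized.input.format IS true",
                    () -> true),
                new Condition("Row deserialization of vectorized input format not supported",
                    () -> false));
            List<String> met = new ArrayList<>();
            List<String> notMet = new ArrayList<>();
            for (Condition c : conditions) {
                if (c.check().getAsBoolean()) {
                    met.add(c.description());
                } else {
                    // Failing conditions are reported with an "IS false" suffix,
                    // as in the enabledConditionsNotMet lines above.
                    notMet.add(c.description() + " IS false");
                }
            }
            System.out.println("enabledConditionsMet: " + met);
            System.out.println("enabledConditionsNotMet: " + notMet);
        }
    }

The vectorization_part_project hunk in the same span is different in kind: its Statistics lines flip from Column stats: COMPLETE to PARTIAL, a stats-annotation change rather than a formatting one.
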
ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out @@ -137,7 +137,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -398,7 +398,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -651,7 +651,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -883,7 +883,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -1113,7 +1113,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -1410,7 +1410,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -1657,7 +1657,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -1961,7 +1961,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -2220,7 +2220,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -2496,7 +2496,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] 
featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -2816,7 +2816,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -3217,7 +3217,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -3452,7 +3452,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -3567,7 +3567,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -3754,7 +3754,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -3869,7 +3869,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -3984,7 +3984,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -4099,7 +4099,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -4214,7 +4214,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -4329,7 +4329,7 @@ STAGE PLANS: enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vectorized_bucketmapjoin1.q.out ql/src/test/results/clientpositive/llap/vectorized_bucketmapjoin1.q.out index 61c5051..6391b4a 100644 --- ql/src/test/results/clientpositive/llap/vectorized_bucketmapjoin1.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_bucketmapjoin1.q.out @@ -147,7 +147,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -182,7 +182,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -282,7 +282,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -317,7 +317,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.row.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.RCFileInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.RCFileInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -417,7 +417,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -453,7 +453,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vectorized_case.q.out ql/src/test/results/clientpositive/llap/vectorized_case.q.out index 8dcff32..6c1d95a 100644 --- ql/src/test/results/clientpositive/llap/vectorized_case.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_case.q.out @@ -87,7 +87,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -229,7 +229,7 @@ STAGE PLANS: enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -311,7 +311,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -439,7 +439,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vectorized_casts.q.out ql/src/test/results/clientpositive/llap/vectorized_casts.q.out index 84b4d94..15eb706 100644 --- ql/src/test/results/clientpositive/llap/vectorized_casts.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_casts.q.out @@ -202,9 +202,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: true + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 diff --git ql/src/test/results/clientpositive/llap/vectorized_context.q.out ql/src/test/results/clientpositive/llap/vectorized_context.q.out index 8907c7f..08273de 100644 --- ql/src/test/results/clientpositive/llap/vectorized_context.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_context.q.out @@ -165,7 +165,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -193,7 +193,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -222,7 +222,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vectorized_date_funcs.q.out ql/src/test/results/clientpositive/llap/vectorized_date_funcs.q.out index db29250..5a23152 100644 --- ql/src/test/results/clientpositive/llap/vectorized_date_funcs.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_date_funcs.q.out @@ -290,7 
+290,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -582,7 +582,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -878,7 +878,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -1152,7 +1152,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -1282,7 +1282,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out index 22f2894..d859dd4 100644 --- ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out @@ -84,7 +84,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -263,7 +263,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -307,7 +307,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -421,7 +421,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: 
[org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -450,7 +450,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -580,7 +580,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -624,7 +624,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -668,7 +668,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -806,7 +806,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -835,7 +835,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -864,7 +864,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -1008,7 +1008,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -1067,7 +1067,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -1181,7 +1181,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - 
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -1210,7 +1210,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -1335,7 +1335,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -1379,7 +1379,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -1493,7 +1493,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -1522,7 +1522,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -1645,7 +1645,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -1689,7 +1689,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -1803,7 +1803,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -1847,7 +1847,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -1961,7 +1961,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] 
vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -1990,7 +1990,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -2104,7 +2104,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -2133,7 +2133,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -2260,7 +2260,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -2304,7 +2304,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -2439,7 +2439,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -2579,7 +2579,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -2607,7 +2607,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -2725,7 +2725,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false 
vectorized: true @@ -2784,7 +2784,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -2898,7 +2898,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -2942,7 +2942,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -3054,7 +3054,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -3080,7 +3080,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -3174,7 +3174,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -3218,7 +3218,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -3316,7 +3316,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -3360,7 +3360,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -3404,7 +3404,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: 
[org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -3539,7 +3539,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -3568,6 +3568,7 @@ STAGE PLANS: enabled: true inputFormatFeatureSupport: [] featureSupportInUse: [] + inputFileFormats: [] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -3596,7 +3597,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -3724,7 +3725,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -3754,7 +3755,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -3784,7 +3785,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -3984,7 +3985,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -4014,7 +4015,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -4044,7 +4045,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -4249,7 +4250,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false 
usesVectorUDFAdaptor: false vectorized: true @@ -4279,7 +4280,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -4307,7 +4308,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -4337,7 +4338,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -4599,7 +4600,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -4643,7 +4644,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -4775,7 +4776,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -4819,7 +4820,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -4863,7 +4864,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -4985,7 +4986,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -5044,7 +5045,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -5163,7 +5164,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -5207,7 +5208,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -5315,7 +5316,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -5359,7 +5360,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -5467,7 +5468,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -5511,7 +5512,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -5627,7 +5628,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -5781,7 +5782,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -5825,7 +5826,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -5916,7 +5917,7 @@ STAGE PLANS: enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -5942,7 +5943,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -6017,7 +6018,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -6059,7 +6060,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -6159,7 +6160,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -6203,7 +6204,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -6247,7 +6248,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -6357,7 +6358,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -6386,6 +6387,7 @@ STAGE PLANS: enabled: true inputFormatFeatureSupport: [] featureSupportInUse: [] + inputFileFormats: [] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -6427,7 +6429,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -6535,7 +6537,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is 
enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -6565,7 +6567,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -6595,7 +6597,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction.q.out ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction.q.out index 228bd9d..8cc9ec7 100644 --- ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction.q.out @@ -86,7 +86,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -157,7 +157,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -320,7 +320,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -391,7 +391,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -554,7 +554,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -625,7 +625,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -789,7 +789,7 @@ STAGE PLANS: enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -860,7 +860,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -931,7 +931,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -1126,7 +1126,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -1225,7 +1225,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -1417,7 +1417,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -1488,7 +1488,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vectorized_mapjoin.q.out ql/src/test/results/clientpositive/llap/vectorized_mapjoin.q.out index 0dff57a..a98f7bf 100644 --- ql/src/test/results/clientpositive/llap/vectorized_mapjoin.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_mapjoin.q.out @@ -95,7 +95,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -137,7 +137,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true diff --git 
ql/src/test/results/clientpositive/llap/vectorized_math_funcs.q.out ql/src/test/results/clientpositive/llap/vectorized_math_funcs.q.out index 36f1bbf..dbb1156 100644 --- ql/src/test/results/clientpositive/llap/vectorized_math_funcs.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_math_funcs.q.out @@ -155,7 +155,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: true vectorized: true diff --git ql/src/test/results/clientpositive/llap/vectorized_nested_mapjoin.q.out ql/src/test/results/clientpositive/llap/vectorized_nested_mapjoin.q.out index 80e1cab..399d179 100644 --- ql/src/test/results/clientpositive/llap/vectorized_nested_mapjoin.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_nested_mapjoin.q.out @@ -71,7 +71,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -100,7 +100,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true @@ -128,7 +128,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vectorized_parquet.q.out ql/src/test/results/clientpositive/llap/vectorized_parquet.q.out index 1724751..ec469bb 100644 --- ql/src/test/results/clientpositive/llap/vectorized_parquet.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_parquet.q.out @@ -165,7 +165,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vectorized_parquet_types.q.out ql/src/test/results/clientpositive/llap/vectorized_parquet_types.q.out index 05e34d6..940a9d3 100644 --- ql/src/test/results/clientpositive/llap/vectorized_parquet_types.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_parquet_types.q.out @@ -320,7 +320,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -609,7 +609,7 @@ STAGE PLANS: 
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/llap/vectorized_ptf.q.out ql/src/test/results/clientpositive/llap/vectorized_ptf.q.out index 9e06471..495fca4 100644 --- ql/src/test/results/clientpositive/llap/vectorized_ptf.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_ptf.q.out @@ -169,9 +169,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 9 @@ -389,9 +390,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 9 @@ -432,9 +434,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 9 @@ -642,9 +645,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 9 @@ -801,9 +805,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 9 @@ -1020,9 +1025,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: true usesVectorUDFAdaptor: false + vectorPartitionDescs: 
[(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 9
@@ -1242,9 +1248,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: true
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 9
@@ -1466,9 +1473,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: true
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 9
@@ -1509,9 +1517,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: true
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 9
@@ -1687,9 +1696,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: true
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 9
@@ -1725,9 +1735,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: true
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 9
@@ -1905,8 +1916,9 @@ STAGE PLANS:
     Map Vectorization:
         enabled: true
        enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-        inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+        inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
        notVectorizedReason: PTF operator: PTF Mapper not supported
+        vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
        vectorized: false
     Reducer 2
        Execution mode: llap
@@ -2100,8 +2112,9 @@ STAGE PLANS:
     Map Vectorization:
        enabled: true
        enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-        inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+        inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
        notVectorizedReason: PTF operator: PTF Mapper not supported
+        vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
        vectorized: false
     Reducer 2
        Execution mode: llap
@@ -2309,9 +2322,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: true
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 9
@@ -2527,9 +2541,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: true
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 9
@@ -2815,9 +2830,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: true
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 9
@@ -3037,9 +3053,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: true
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 9
@@ -3080,9 +3097,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: true
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 9
@@ -3330,9 +3348,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: true
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 9
@@ -3568,9 +3587,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 9
@@ -3831,9 +3851,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: true
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 9
@@ -4347,9 +4368,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: true
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 9
@@ -4665,9 +4687,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: true
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 9
@@ -4979,9 +5002,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: true
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 9
@@ -5301,9 +5325,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: true
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 9
@@ -5634,9 +5659,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: true
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 9
@@ -5936,9 +5962,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: true
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 9
diff --git ql/src/test/results/clientpositive/llap/vectorized_shufflejoin.q.out ql/src/test/results/clientpositive/llap/vectorized_shufflejoin.q.out
index 72216d3..f47bb0f 100644
--- ql/src/test/results/clientpositive/llap/vectorized_shufflejoin.q.out
+++ ql/src/test/results/clientpositive/llap/vectorized_shufflejoin.q.out
@@ -62,7 +62,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: true
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -104,7 +104,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: true
     usesVectorUDFAdaptor: false
     vectorized: true
diff --git ql/src/test/results/clientpositive/llap/vectorized_string_funcs.q.out ql/src/test/results/clientpositive/llap/vectorized_string_funcs.q.out
index 1f6e152..c417ea4 100644
--- ql/src/test/results/clientpositive/llap/vectorized_string_funcs.q.out
+++ ql/src/test/results/clientpositive/llap/vectorized_string_funcs.q.out
@@ -81,7 +81,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
diff --git ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out
index 0e38b0b..0312f47 100644
--- ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out
+++ ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out
@@ -168,9 +168,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 1
@@ -300,9 +301,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 1
@@ -393,9 +395,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 1
@@ -533,9 +536,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 1
diff --git ql/src/test/results/clientpositive/llap/vectorized_timestamp_funcs.q.out ql/src/test/results/clientpositive/llap/vectorized_timestamp_funcs.q.out
index 7986494..250eb3c 100644
--- ql/src/test/results/clientpositive/llap/vectorized_timestamp_funcs.q.out
+++ ql/src/test/results/clientpositive/llap/vectorized_timestamp_funcs.q.out
@@ -137,7 +137,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: true
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -319,7 +319,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: true
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -501,7 +501,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: true
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -683,7 +683,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: true
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -826,7 +826,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -953,7 +953,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -1097,7 +1097,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
diff --git ql/src/test/results/clientpositive/llap/vectorized_timestamp_ints_casts.q.out ql/src/test/results/clientpositive/llap/vectorized_timestamp_ints_casts.q.out
index c346ff2..e8bf577 100644
--- ql/src/test/results/clientpositive/llap/vectorized_timestamp_ints_casts.q.out
+++ ql/src/test/results/clientpositive/llap/vectorized_timestamp_ints_casts.q.out
@@ -87,7 +87,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: true
     vectorized: true
@@ -251,7 +251,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: true
     vectorized: true
diff --git ql/src/test/results/clientpositive/tez/vector_non_string_partition.q.out ql/src/test/results/clientpositive/tez/vector_non_string_partition.q.out
index fa72556..bf807f7 100644
--- ql/src/test/results/clientpositive/tez/vector_non_string_partition.q.out
+++ ql/src/test/results/clientpositive/tez/vector_non_string_partition.q.out
@@ -85,7 +85,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: true
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -206,7 +206,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: true
     usesVectorUDFAdaptor: false
     vectorized: true
diff --git ql/src/test/results/clientpositive/tez/vectorization_div0.q.out ql/src/test/results/clientpositive/tez/vectorization_div0.q.out
index af87ffe..bce77b4 100644
--- ql/src/test/results/clientpositive/tez/vectorization_div0.q.out
+++ ql/src/test/results/clientpositive/tez/vectorization_div0.q.out
@@ -55,7 +55,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -237,7 +237,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: true
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -454,7 +454,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: true
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -671,7 +671,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: true
     usesVectorUDFAdaptor: false
     vectorized: true
diff --git ql/src/test/results/clientpositive/vector_aggregate_9.q.out ql/src/test/results/clientpositive/vector_aggregate_9.q.out
index 99d23b1..36fe494 100644
--- ql/src/test/results/clientpositive/vector_aggregate_9.q.out
+++ ql/src/test/results/clientpositive/vector_aggregate_9.q.out
@@ -160,9 +160,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 13
@@ -262,9 +263,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 13
@@ -364,9 +366,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 13
diff --git ql/src/test/results/clientpositive/vector_aggregate_without_gby.q.out ql/src/test/results/clientpositive/vector_aggregate_without_gby.q.out
index eab3b7a..ff1ed5d 100644
--- ql/src/test/results/clientpositive/vector_aggregate_without_gby.q.out
+++ ql/src/test/results/clientpositive/vector_aggregate_without_gby.q.out
@@ -99,9 +99,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 3
diff --git ql/src/test/results/clientpositive/vector_between_columns.q.out ql/src/test/results/clientpositive/vector_between_columns.q.out
index c65ef71..cc84938 100644
--- ql/src/test/results/clientpositive/vector_between_columns.q.out
+++ ql/src/test/results/clientpositive/vector_between_columns.q.out
@@ -153,7 +153,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: true
     vectorized: true
@@ -302,7 +302,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: true
     vectorized: true
diff --git ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out
index 3f9e90b..bc08a0a 100644
--- ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out
+++ ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out
@@ -213,7 +213,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: true
     vectorized: true
@@ -257,7 +257,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat
+    inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -366,7 +366,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -414,7 +414,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat
+    inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -584,7 +584,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
diff --git ql/src/test/results/clientpositive/vector_bround.q.out ql/src/test/results/clientpositive/vector_bround.q.out
index a2b745a..435a5a7 100644
--- ql/src/test/results/clientpositive/vector_bround.q.out
+++ ql/src/test/results/clientpositive/vector_bround.q.out
@@ -83,9 +83,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 2
diff --git ql/src/test/results/clientpositive/vector_cast_constant.q.out ql/src/test/results/clientpositive/vector_cast_constant.q.out
index d16c7e0..bc01b28 100644
--- ql/src/test/results/clientpositive/vector_cast_constant.q.out
+++ ql/src/test/results/clientpositive/vector_cast_constant.q.out
@@ -167,7 +167,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -212,7 +212,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat
+    inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
diff --git ql/src/test/results/clientpositive/vector_char_2.q.out ql/src/test/results/clientpositive/vector_char_2.q.out
index ea0ddff..2f140c7 100644
--- ql/src/test/results/clientpositive/vector_char_2.q.out
+++ ql/src/test/results/clientpositive/vector_char_2.q.out
@@ -118,7 +118,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -163,7 +163,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat
+    inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -306,7 +306,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -351,7 +351,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat
+    inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
diff --git ql/src/test/results/clientpositive/vector_char_4.q.out ql/src/test/results/clientpositive/vector_char_4.q.out
index b388550..7d2b333 100644
--- ql/src/test/results/clientpositive/vector_char_4.q.out
+++ ql/src/test/results/clientpositive/vector_char_4.q.out
@@ -176,7 +176,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
diff --git ql/src/test/results/clientpositive/vector_char_mapjoin1.q.out ql/src/test/results/clientpositive/vector_char_mapjoin1.q.out
index c98bb44..280b9e0 100644
--- ql/src/test/results/clientpositive/vector_char_mapjoin1.q.out
+++ ql/src/test/results/clientpositive/vector_char_mapjoin1.q.out
@@ -215,7 +215,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -348,7 +348,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -483,7 +483,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
diff --git ql/src/test/results/clientpositive/vector_char_simple.q.out ql/src/test/results/clientpositive/vector_char_simple.q.out
index 43c3e48..2de29ff 100644
--- ql/src/test/results/clientpositive/vector_char_simple.q.out
+++ ql/src/test/results/clientpositive/vector_char_simple.q.out
@@ -72,7 +72,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -149,7 +149,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -238,7 +238,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
diff --git ql/src/test/results/clientpositive/vector_coalesce.q.out ql/src/test/results/clientpositive/vector_coalesce.q.out
index d1b12e6..84e9cd6 100644
--- ql/src/test/results/clientpositive/vector_coalesce.q.out
+++ ql/src/test/results/clientpositive/vector_coalesce.q.out
@@ -44,7 +44,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -129,7 +129,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -215,7 +215,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -295,7 +295,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -381,7 +381,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -460,7 +460,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
diff --git ql/src/test/results/clientpositive/vector_coalesce_2.q.out ql/src/test/results/clientpositive/vector_coalesce_2.q.out
index a5a7915..318c2e1 100644
--- ql/src/test/results/clientpositive/vector_coalesce_2.q.out
+++ ql/src/test/results/clientpositive/vector_coalesce_2.q.out
@@ -224,7 +224,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -323,7 +323,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
diff --git ql/src/test/results/clientpositive/vector_complex_join.q.out ql/src/test/results/clientpositive/vector_complex_join.q.out
index 487ba5b..a661016 100644
--- ql/src/test/results/clientpositive/vector_complex_join.q.out
+++ ql/src/test/results/clientpositive/vector_complex_join.q.out
@@ -111,7 +111,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -263,7 +263,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -378,7 +378,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
diff --git ql/src/test/results/clientpositive/vector_count.q.out ql/src/test/results/clientpositive/vector_count.q.out
index 7e30870..cec11bf 100644
--- ql/src/test/results/clientpositive/vector_count.q.out
+++ ql/src/test/results/clientpositive/vector_count.q.out
@@ -103,7 +103,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -181,7 +181,7 @@ STAGE PLANS:
     Map Vectorization:
        enabled: true
        enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-        inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+        inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
        notVectorizedReason: GROUPBY operator: Aggregations with > 1 parameter are not supported count([Column[a], Column[b]])
        vectorized: false
     Reduce Vectorization:
@@ -267,7 +267,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -352,7 +352,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
diff --git ql/src/test/results/clientpositive/vector_data_types.q.out ql/src/test/results/clientpositive/vector_data_types.q.out
index 2b6491f..7ca9acd 100644
--- ql/src/test/results/clientpositive/vector_data_types.q.out
+++ ql/src/test/results/clientpositive/vector_data_types.q.out
@@ -231,7 +231,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
diff --git ql/src/test/results/clientpositive/vector_date_1.q.out ql/src/test/results/clientpositive/vector_date_1.q.out
index 3f2b212..a3e01cf 100644
--- ql/src/test/results/clientpositive/vector_date_1.q.out
+++ ql/src/test/results/clientpositive/vector_date_1.q.out
@@ -699,7 +699,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
diff --git ql/src/test/results/clientpositive/vector_decimal_1.q.out ql/src/test/results/clientpositive/vector_decimal_1.q.out
index d922bfb..9b895b1 100644
--- ql/src/test/results/clientpositive/vector_decimal_1.q.out
+++ ql/src/test/results/clientpositive/vector_decimal_1.q.out
@@ -80,9 +80,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 3
@@ -170,9 +171,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 3
@@ -260,9 +262,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 3
@@ -350,9 +353,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 3
@@ -440,9 +444,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 3
@@ -530,9 +535,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 3
@@ -620,9 +626,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 3
@@ -710,9 +717,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 3
@@ -800,9 +808,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 3
diff --git ql/src/test/results/clientpositive/vector_decimal_10_0.q.out ql/src/test/results/clientpositive/vector_decimal_10_0.q.out
index bc5208c..afe70e0 100644
--- ql/src/test/results/clientpositive/vector_decimal_10_0.q.out
+++ ql/src/test/results/clientpositive/vector_decimal_10_0.q.out
@@ -80,9 +80,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 1
@@ -170,9 +171,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
     inputFormatFeatureSupport: [DECIMAL_64]
     featureSupportInUse: [DECIMAL_64]
-    inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+    inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 1
diff --git ql/src/test/results/clientpositive/vector_decimal_6.q.out ql/src/test/results/clientpositive/vector_decimal_6.q.out
index 8896459..5d92afa 100644
--- ql/src/test/results/clientpositive/vector_decimal_6.q.out
+++ ql/src/test/results/clientpositive/vector_decimal_6.q.out
@@ -153,9 +153,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 2
@@ -268,9 +269,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 2
@@ -537,9 +539,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 2
diff --git ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out
index b2fb974..042e089 100644
--- ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out
+++ ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out
@@ -93,9 +93,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 4
@@ -234,9 +235,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 4
@@ -397,9 +399,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
     inputFormatFeatureSupport: [DECIMAL_64]
     featureSupportInUse: [DECIMAL_64]
-    inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+    inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 4
@@ -538,9 +541,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
     inputFormatFeatureSupport: [DECIMAL_64]
     featureSupportInUse: [DECIMAL_64]
-    inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+    inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 4
diff --git ql/src/test/results/clientpositive/vector_decimal_cast.q.out ql/src/test/results/clientpositive/vector_decimal_cast.q.out
index 55a8a46..6657563 100644
--- ql/src/test/results/clientpositive/vector_decimal_cast.q.out
+++ ql/src/test/results/clientpositive/vector_decimal_cast.q.out
@@ -58,9 +58,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 12
@@ -177,9 +178,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
     inputFormatFeatureSupport: [DECIMAL_64]
     featureSupportInUse: [DECIMAL_64]
-    inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+    inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 12
diff --git ql/src/test/results/clientpositive/vector_decimal_expressions.q.out ql/src/test/results/clientpositive/vector_decimal_expressions.q.out
index 434fe22..aa9b8c6 100644
--- ql/src/test/results/clientpositive/vector_decimal_expressions.q.out
+++ ql/src/test/results/clientpositive/vector_decimal_expressions.q.out
@@ -71,9 +71,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 3
@@ -202,9 +203,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 3
diff --git ql/src/test/results/clientpositive/vector_decimal_mapjoin.q.out ql/src/test/results/clientpositive/vector_decimal_mapjoin.q.out
index 6fb0c34..5a0b470 100644
--- ql/src/test/results/clientpositive/vector_decimal_mapjoin.q.out
+++ ql/src/test/results/clientpositive/vector_decimal_mapjoin.q.out
@@ -169,9 +169,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 2
@@ -399,9 +400,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 2
@@ -705,9 +707,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
     inputFormatFeatureSupport: [DECIMAL_64]
     featureSupportInUse: [DECIMAL_64]
-    inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+    inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 2
@@ -830,9 +833,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
     inputFormatFeatureSupport: [DECIMAL_64]
     featureSupportInUse: [DECIMAL_64]
-    inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+    inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 2
@@ -956,9 +960,10 @@ STAGE PLANS:
     inputFormatFeatureSupport: [DECIMAL_64]
     vectorizationSupportRemovedReasons: [[] is disabled because it is not in hive.vectorized.input.format.supports.enabled []]
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+    inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 2
@@ -1082,9 +1087,10 @@ STAGE PLANS:
     inputFormatFeatureSupport: [DECIMAL_64]
     vectorizationSupportRemovedReasons: [[] is disabled because it is not in hive.vectorized.input.format.supports.enabled []]
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+    inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 2
diff --git ql/src/test/results/clientpositive/vector_decimal_math_funcs.q.out ql/src/test/results/clientpositive/vector_decimal_math_funcs.q.out
index 9f4d478..cd3ec15 100644
--- ql/src/test/results/clientpositive/vector_decimal_math_funcs.q.out
+++ ql/src/test/results/clientpositive/vector_decimal_math_funcs.q.out
@@ -136,9 +136,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 4
@@ -378,9 +379,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 4
diff --git ql/src/test/results/clientpositive/vector_decimal_precision.q.out ql/src/test/results/clientpositive/vector_decimal_precision.q.out
index e783650..6b85c3a 100644
--- ql/src/test/results/clientpositive/vector_decimal_precision.q.out
+++ ql/src/test/results/clientpositive/vector_decimal_precision.q.out
@@ -606,9 +606,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 1
@@ -1187,9 +1188,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
     inputFormatFeatureSupport: [DECIMAL_64]
     featureSupportInUse: [DECIMAL_64]
-    inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+    inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 1
diff --git ql/src/test/results/clientpositive/vector_decimal_round.q.out ql/src/test/results/clientpositive/vector_decimal_round.q.out
index 79c2cd5..ff89c84 100644
--- ql/src/test/results/clientpositive/vector_decimal_round.q.out
+++ ql/src/test/results/clientpositive/vector_decimal_round.q.out
@@ -79,9 +79,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
     inputFormatFeatureSupport: [DECIMAL_64]
     featureSupportInUse: [DECIMAL_64]
-    inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+    inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 1
@@ -170,9 +171,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
     inputFormatFeatureSupport: [DECIMAL_64]
     featureSupportInUse: [DECIMAL_64]
-    inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+    inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 1
@@ -289,9 +291,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.row.serde.deserialize IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.RCFileInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe, org.apache.hadoop.hive.ql.io.RCFileInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 1
@@ -380,9 +383,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.row.serde.deserialize IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.RCFileInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(ROW_DESERIALIZE, org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe, org.apache.hadoop.hive.ql.io.RCFileInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 1
@@ -499,9 +503,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 1
@@ -590,9 +595,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 1
diff --git ql/src/test/results/clientpositive/vector_decimal_round_2.q.out ql/src/test/results/clientpositive/vector_decimal_round_2.q.out
index 76dd8ab..635af30 100644
--- ql/src/test/results/clientpositive/vector_decimal_round_2.q.out
+++ ql/src/test/results/clientpositive/vector_decimal_round_2.q.out
@@ -83,9 +83,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 1
@@ -225,9 +226,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 2
@@ -394,9 +396,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 1
@@ -552,9 +555,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 2
diff --git ql/src/test/results/clientpositive/vector_decimal_trailing.q.out ql/src/test/results/clientpositive/vector_decimal_trailing.q.out
index 2f8e5c9..9f4d749 100644
--- ql/src/test/results/clientpositive/vector_decimal_trailing.q.out
+++ ql/src/test/results/clientpositive/vector_decimal_trailing.q.out
@@ -113,9 +113,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 3
diff --git ql/src/test/results/clientpositive/vector_decimal_udf2.q.out ql/src/test/results/clientpositive/vector_decimal_udf2.q.out
index 541adfb..21ca225 100644
--- ql/src/test/results/clientpositive/vector_decimal_udf2.q.out
+++ ql/src/test/results/clientpositive/vector_decimal_udf2.q.out
@@ -106,9 +106,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 2
@@ -198,9 +199,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: true
+    vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 2
@@ -290,9 +292,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
     inputFormatFeatureSupport: [DECIMAL_64]
     featureSupportInUse: [DECIMAL_64]
-    inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+    inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
+    vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 2
@@ -382,9 +385,10 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
     inputFormatFeatureSupport: [DECIMAL_64]
     featureSupportInUse: [DECIMAL_64]
-    inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+    inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat]
     allNative: false
     usesVectorUDFAdaptor: true
+    vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)]
     vectorized: true
     rowBatchContext:
         dataColumnCount: 2
diff --git ql/src/test/results/clientpositive/vector_distinct_2.q.out ql/src/test/results/clientpositive/vector_distinct_2.q.out
index c3d2d89..7ba695b 100644
--- ql/src/test/results/clientpositive/vector_distinct_2.q.out
+++ ql/src/test/results/clientpositive/vector_distinct_2.q.out
@@ -160,7 +160,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
diff --git ql/src/test/results/clientpositive/vector_elt.q.out ql/src/test/results/clientpositive/vector_elt.q.out
index 0b51d83..84ce264 100644
--- ql/src/test/results/clientpositive/vector_elt.q.out
+++ ql/src/test/results/clientpositive/vector_elt.q.out
@@ -61,7 +61,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -168,7 +168,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
diff --git ql/src/test/results/clientpositive/vector_empty_where.q.out ql/src/test/results/clientpositive/vector_empty_where.q.out
index 6b2c7fe..f7a67d7 100644
--- ql/src/test/results/clientpositive/vector_empty_where.q.out
+++ ql/src/test/results/clientpositive/vector_empty_where.q.out
@@ -65,7 +65,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -112,7 +112,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat
+    inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -208,7 +208,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -255,7 +255,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat
+    inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -359,7 +359,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -406,7 +406,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat
+    inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat]
     allNative: false
     usesVectorUDFAdaptor: false
     vectorized: true
@@ -510,7 +510,7 @@ STAGE PLANS:
     enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
     inputFormatFeatureSupport: []
     featureSupportInUse: []
-    inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+    inputFileFormats:
[org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -557,7 +557,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/vector_gather_stats.q.out ql/src/test/results/clientpositive/vector_gather_stats.q.out index 0c87383..ce4bbd9 100644 --- ql/src/test/results/clientpositive/vector_gather_stats.q.out +++ ql/src/test/results/clientpositive/vector_gather_stats.q.out @@ -81,8 +81,9 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] notVectorizedReason: TABLESCAN operator: gather stats not supported + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: false Stage: Stage-1 diff --git ql/src/test/results/clientpositive/vector_groupby4.q.out ql/src/test/results/clientpositive/vector_groupby4.q.out index 443385b..5ed341e 100644 --- ql/src/test/results/clientpositive/vector_groupby4.q.out +++ ql/src/test/results/clientpositive/vector_groupby4.q.out @@ -72,7 +72,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -115,7 +115,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -188,7 +188,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -230,7 +230,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/vector_groupby6.q.out ql/src/test/results/clientpositive/vector_groupby6.q.out index af206f3..fa31f96 100644 --- ql/src/test/results/clientpositive/vector_groupby6.q.out +++ ql/src/test/results/clientpositive/vector_groupby6.q.out @@ -72,7 +72,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false 
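The recurring change in these golden-file hunks is that inputFileFormats is now printed as a bracketed list instead of a bare class name, i.e. the form a java.util collection's toString() produces. Below is a minimal sketch of that rendering, assuming the explain formatter gathers the input format class names into an ordered set; ExplainRenderSketch and formatInputFileFormats are illustrative names, not Hive's actual formatter.

import java.util.Set;
import java.util.TreeSet;

public class ExplainRenderSketch {
  // Old output printed a bare class name; the new golden files show the
  // collection toString() form, e.g.
  // "[org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]".
  static String formatInputFileFormats(Set<String> formatClassNames) {
    // TreeSet gives a deterministic order for stable golden files.
    return new TreeSet<>(formatClassNames).toString();
  }

  public static void main(String[] args) {
    System.out.println(formatInputFileFormats(
        Set.of("org.apache.hadoop.hive.ql.io.orc.OrcInputFormat")));
    // prints: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
  }
}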
usesVectorUDFAdaptor: false vectorized: true @@ -115,7 +115,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -188,7 +188,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -230,7 +230,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/vector_groupby_3.q.out ql/src/test/results/clientpositive/vector_groupby_3.q.out index dfac04d..54d88be 100644 --- ql/src/test/results/clientpositive/vector_groupby_3.q.out +++ ql/src/test/results/clientpositive/vector_groupby_3.q.out @@ -163,7 +163,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/vector_groupby_mapjoin.q.out ql/src/test/results/clientpositive/vector_groupby_mapjoin.q.out index bfe2d80..8a58b3f 100644 --- ql/src/test/results/clientpositive/vector_groupby_mapjoin.q.out +++ ql/src/test/results/clientpositive/vector_groupby_mapjoin.q.out @@ -73,7 +73,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -149,7 +149,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -224,7 +224,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -253,7 +253,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -358,7 
+358,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/vector_groupby_reduce.q.out ql/src/test/results/clientpositive/vector_groupby_reduce.q.out index 5ac1ea8..372cb23 100644 --- ql/src/test/results/clientpositive/vector_groupby_reduce.q.out +++ ql/src/test/results/clientpositive/vector_groupby_reduce.q.out @@ -289,7 +289,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -332,7 +332,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -480,7 +480,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -532,7 +532,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -757,7 +757,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -812,7 +812,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -955,7 +955,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -1013,7 +1013,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false 
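The other recurring addition in these plans is the vectorPartitionDescs line, which pairs a read mode with the input format and, for VECTOR_DESERIALIZE, a serde variant such as LAZY_SIMPLE. The sketch below shows how such tuples could be rendered to match the golden text; the enum and render method are hypothetical stand-ins, not the real VectorPartitionDesc class.

public class VectorPartitionDescSketch {
  // Hypothetical subset of the read modes visible in the hunks above.
  enum Mode { VECTORIZED_INPUT_FILE_FORMAT, VECTOR_DESERIALIZE }

  static String render(Mode mode, String inputFormat, String serdeKind) {
    // VECTOR_DESERIALIZE entries carry a third element naming the serde
    // variant (LAZY_SIMPLE in the text-table plans above).
    return serdeKind == null
        ? "(" + mode + ", " + inputFormat + ")"
        : "(" + mode + ", " + inputFormat + ", " + serdeKind + ")";
  }

  public static void main(String[] args) {
    System.out.println(render(Mode.VECTORIZED_INPUT_FILE_FORMAT,
        "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat", null));
    System.out.println(render(Mode.VECTOR_DESERIALIZE,
        "org.apache.hadoop.mapred.TextInputFormat", "LAZY_SIMPLE"));
  }
}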
vectorized: true diff --git ql/src/test/results/clientpositive/vector_grouping_sets.q.out ql/src/test/results/clientpositive/vector_grouping_sets.q.out index f08c270..b3c2ef8 100644 --- ql/src/test/results/clientpositive/vector_grouping_sets.q.out +++ ql/src/test/results/clientpositive/vector_grouping_sets.q.out @@ -186,7 +186,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -297,7 +297,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/vector_if_expr.q.out ql/src/test/results/clientpositive/vector_if_expr.q.out index c4d196c..639cca2 100644 --- ql/src/test/results/clientpositive/vector_if_expr.q.out +++ ql/src/test/results/clientpositive/vector_if_expr.q.out @@ -53,7 +53,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/vector_if_expr_2.q.out ql/src/test/results/clientpositive/vector_if_expr_2.q.out index fe4f77c..6506613 100644 --- ql/src/test/results/clientpositive/vector_if_expr_2.q.out +++ ql/src/test/results/clientpositive/vector_if_expr_2.q.out @@ -68,7 +68,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/vector_include_no_sel.q.out ql/src/test/results/clientpositive/vector_include_no_sel.q.out index 848823f..ffb631b 100644 --- ql/src/test/results/clientpositive/vector_include_no_sel.q.out +++ ql/src/test/results/clientpositive/vector_include_no_sel.q.out @@ -261,7 +261,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/vector_interval_1.q.out ql/src/test/results/clientpositive/vector_interval_1.q.out index 03dad18..02c7ccd 100644 --- ql/src/test/results/clientpositive/vector_interval_1.q.out +++ ql/src/test/results/clientpositive/vector_interval_1.q.out @@ -94,7 +94,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] 
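For the JSON-format explain output (see the vector_outer_join3.q.out plans further below), the same two fields appear inside the "Map Vectorization:" object. Here is a small verification sketch, under the assumption that one -/+ plan line from the diff is passed in whole; it uses Jackson, and the stage path mirrors the keys visible in those plans, which keep their trailing colons.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class ExplainJsonCheck {
  public static void main(String[] args) throws Exception {
    String planJson = args[0]; // one full plan line taken from the .q.out diff
    JsonNode mapVec = new ObjectMapper().readTree(planJson)
        .path("STAGE PLANS").path("Stage-3").path("Map Reduce")
        .path("Map Vectorization:");
    // Prints the value in either the old JSON-array form or the new
    // stringified "[...]" form, plus the added vectorPartitionDescs entry
    // (an empty string when run against the old plan line).
    System.out.println(mapVec.path("inputFileFormats:"));
    System.out.println(mapVec.path("vectorPartitionDescs:").asText());
  }
}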
allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -203,7 +203,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -320,7 +320,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -449,7 +449,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -590,7 +590,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -713,7 +713,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -818,7 +818,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -929,7 +929,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/vector_interval_arithmetic.q.out ql/src/test/results/clientpositive/vector_interval_arithmetic.q.out index 3ab7467..8812955 100644 --- ql/src/test/results/clientpositive/vector_interval_arithmetic.q.out +++ ql/src/test/results/clientpositive/vector_interval_arithmetic.q.out @@ -102,7 +102,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -267,7 +267,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: 
[org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -432,7 +432,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -601,7 +601,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -700,7 +700,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -867,7 +867,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -1034,7 +1034,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -1201,7 +1201,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/vector_interval_mapjoin.q.out ql/src/test/results/clientpositive/vector_interval_mapjoin.q.out index 1654bd9..b5a0915 100644 --- ql/src/test/results/clientpositive/vector_interval_mapjoin.q.out +++ ql/src/test/results/clientpositive/vector_interval_mapjoin.q.out @@ -276,7 +276,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/vector_left_outer_join.q.out ql/src/test/results/clientpositive/vector_left_outer_join.q.out index 2e0b82d..458c875 100644 --- ql/src/test/results/clientpositive/vector_left_outer_join.q.out +++ ql/src/test/results/clientpositive/vector_left_outer_join.q.out @@ -101,7 +101,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: 
[org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/vector_left_outer_join2.q.out ql/src/test/results/clientpositive/vector_left_outer_join2.q.out index 979477b..f794534 100644 --- ql/src/test/results/clientpositive/vector_left_outer_join2.q.out +++ ql/src/test/results/clientpositive/vector_left_outer_join2.q.out @@ -359,7 +359,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -482,7 +482,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -605,7 +605,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -728,7 +728,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/vector_like_2.q.out ql/src/test/results/clientpositive/vector_like_2.q.out index 26ff792..d355950 100644 --- ql/src/test/results/clientpositive/vector_like_2.q.out +++ ql/src/test/results/clientpositive/vector_like_2.q.out @@ -68,9 +68,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 1 diff --git ql/src/test/results/clientpositive/vector_mr_diff_schema_alias.q.out ql/src/test/results/clientpositive/vector_mr_diff_schema_alias.q.out index 634acb5..2b3b7a4 100644 --- ql/src/test/results/clientpositive/vector_mr_diff_schema_alias.q.out +++ ql/src/test/results/clientpositive/vector_mr_diff_schema_alias.q.out @@ -372,7 +372,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -410,7 +410,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: 
org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/vector_multi_insert.q.out ql/src/test/results/clientpositive/vector_multi_insert.q.out index 9fc92dd..28cbcb7 100644 --- ql/src/test/results/clientpositive/vector_multi_insert.q.out +++ ql/src/test/results/clientpositive/vector_multi_insert.q.out @@ -161,7 +161,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/vector_non_constant_in_expr.q.out ql/src/test/results/clientpositive/vector_non_constant_in_expr.q.out index 2e792f1..537e151 100644 --- ql/src/test/results/clientpositive/vector_non_constant_in_expr.q.out +++ ql/src/test/results/clientpositive/vector_non_constant_in_expr.q.out @@ -34,7 +34,7 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] notVectorizedReason: FILTER operator: Vectorizing IN expression only supported for constant values vectorized: false diff --git ql/src/test/results/clientpositive/vector_non_string_partition.q.out ql/src/test/results/clientpositive/vector_non_string_partition.q.out index 3e4f7b9..de0c693 100644 --- ql/src/test/results/clientpositive/vector_non_string_partition.q.out +++ ql/src/test/results/clientpositive/vector_non_string_partition.q.out @@ -80,7 +80,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -182,7 +182,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/vector_null_projection.q.out ql/src/test/results/clientpositive/vector_null_projection.q.out index 8e491ff..cff9671 100644 --- ql/src/test/results/clientpositive/vector_null_projection.q.out +++ ql/src/test/results/clientpositive/vector_null_projection.q.out @@ -67,7 +67,7 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] notVectorizedReason: Select expression for SELECT operator: Vectorizing data type void not supported when mode = PROJECTION vectorized: false diff --git ql/src/test/results/clientpositive/vector_nvl.q.out ql/src/test/results/clientpositive/vector_nvl.q.out index 1c5cedb..7907505 100644 --- ql/src/test/results/clientpositive/vector_nvl.q.out +++ 
ql/src/test/results/clientpositive/vector_nvl.q.out @@ -63,7 +63,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -154,7 +154,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -243,7 +243,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -316,7 +316,7 @@ STAGE PLANS: Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] notVectorizedReason: Select expression for SELECT operator: Vectorizing data type void not supported when mode = PROJECTION vectorized: false diff --git ql/src/test/results/clientpositive/vector_order_null.q.out ql/src/test/results/clientpositive/vector_order_null.q.out index c50e275..8105196 100644 --- ql/src/test/results/clientpositive/vector_order_null.q.out +++ ql/src/test/results/clientpositive/vector_order_null.q.out @@ -120,9 +120,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -216,9 +217,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -312,9 +314,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -408,9 +411,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - 
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -504,9 +508,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -600,9 +605,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -696,9 +702,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -792,9 +799,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -888,9 +896,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -984,9 +993,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1080,9 +1090,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - 
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 diff --git ql/src/test/results/clientpositive/vector_orderby_5.q.out ql/src/test/results/clientpositive/vector_orderby_5.q.out index 793d99e..d7a1ecc 100644 --- ql/src/test/results/clientpositive/vector_orderby_5.q.out +++ ql/src/test/results/clientpositive/vector_orderby_5.q.out @@ -164,7 +164,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -208,7 +208,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/vector_outer_join0.q.out ql/src/test/results/clientpositive/vector_outer_join0.q.out index de6c894..c3f81b8 100644 --- ql/src/test/results/clientpositive/vector_outer_join0.q.out +++ ql/src/test/results/clientpositive/vector_outer_join0.q.out @@ -146,9 +146,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -265,9 +266,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 2 diff --git ql/src/test/results/clientpositive/vector_outer_join1.q.out ql/src/test/results/clientpositive/vector_outer_join1.q.out index 9143fc8..c0a74df 100644 --- ql/src/test/results/clientpositive/vector_outer_join1.q.out +++ ql/src/test/results/clientpositive/vector_outer_join1.q.out @@ -306,9 +306,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 @@ -448,9 +449,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: 
[] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 @@ -723,9 +725,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 diff --git ql/src/test/results/clientpositive/vector_outer_join2.q.out ql/src/test/results/clientpositive/vector_outer_join2.q.out index 2fe0de3..8d0f14c 100644 --- ql/src/test/results/clientpositive/vector_outer_join2.q.out +++ ql/src/test/results/clientpositive/vector_outer_join2.q.out @@ -363,9 +363,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)] vectorized: true rowBatchContext: dataColumnCount: 12 diff --git ql/src/test/results/clientpositive/vector_outer_join3.q.out ql/src/test/results/clientpositive/vector_outer_join3.q.out index 1ab03fb..9a37d78 100644 --- ql/src/test/results/clientpositive/vector_outer_join3.q.out +++ ql/src/test/results/clientpositive/vector_outer_join3.q.out @@ -244,7 +244,7 @@ left outer join small_alltypesorc_a hd on hd.cstring1 = c.cstring1 ) t1 POSTHOOK: type: QUERY -{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cint"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cint (type: int)","columnExprMap:":{"_col0":"cint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: int)","1":"_col0 (type: int)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"cstring1 (type: 
string)","columnExprMap:":{"_col0":"cstring1"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["cint","cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"cint (type: int), cstring1 (type: string)","columnExprMap:":{"_col0":"cint","_col1":"cstring1"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[2, 6]"},"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col1":"0:_col1"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: int)","1":"_col0 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 2:int"],"bigTableValueExpressions:":["col 6:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col1"],"Statistics:":"Num rows: 22 Data size: 4840 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 24 Data size: 5324 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink 
Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[2, 6]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} +{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cint"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cint (type: int)","columnExprMap:":{"_col0":"cint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: int)","1":"_col0 (type: int)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: 
NONE","table:":"small_alltypesorc_a","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"cstring1 (type: string)","columnExprMap:":{"_col0":"cstring1"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["cint","cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"cint (type: int), cstring1 (type: string)","columnExprMap:":{"_col0":"cint","_col1":"cstring1"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[2, 6]"},"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col1":"0:_col1"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: int)","1":"_col0 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 2:int"],"bigTableValueExpressions:":["col 6:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col1"],"Statistics:":"Num rows: 22 Data size: 4840 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 24 Data size: 5324 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output 
Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":"[org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]","allNative:":"false","usesVectorUDFAdaptor:":"false","vectorPartitionDescs:":"[(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[2, 6]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} PREHOOK: query: select count(*) from (select c.cstring1 from small_alltypesorc_a c left outer join small_alltypesorc_a cd @@ -284,7 +284,7 @@ left outer join small_alltypesorc_a hd on hd.cstring1 = c.cstring1 ) t1 POSTHOOK: type: QUERY -{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cstring2 (type: string)","columnExprMap:":{"_col0":"cstring2"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data 
size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"cstring1 (type: string)","columnExprMap:":{"_col0":"cstring1"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: string)","1":"_col0 (type: string)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["cstring1","cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"cstring1 (type: string), cstring2 (type: string)","columnExprMap:":{"_col0":"cstring1","_col1":"cstring2"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[6, 7]"},"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 7:string"],"bigTableValueExpressions:":["col 6:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 22 Data size: 4840 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: string)","1":"_col0 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 24 Data size: 5324 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By 
Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[6, 7]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} +{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cstring2 (type: 
string)","columnExprMap:":{"_col0":"cstring2"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"cstring1 (type: string)","columnExprMap:":{"_col0":"cstring1"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: string)","1":"_col0 (type: string)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["cstring1","cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"cstring1 (type: string), cstring2 (type: string)","columnExprMap:":{"_col0":"cstring1","_col1":"cstring2"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[6, 7]"},"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 7:string"],"bigTableValueExpressions:":["col 6:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 22 Data size: 4840 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: string)","1":"_col0 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 24 Data size: 5324 Basic stats: COMPLETE Column stats: 
NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":"[org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]","allNative:":"false","usesVectorUDFAdaptor:":"false","vectorPartitionDescs:":"[(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[6, 7]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} PREHOOK: query: select count(*) from (select c.cstring1 from small_alltypesorc_a c left outer join small_alltypesorc_a cd @@ -324,7 +324,7 @@ left outer join small_alltypesorc_a hd on hd.cstring1 = c.cstring1 and hd.cint = c.cint ) t1 POSTHOOK: type: QUERY -{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch 
Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cbigint","cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cbigint (type: bigint), cstring2 (type: string)","columnExprMap:":{"_col0":"cbigint","_col1":"cstring2"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: bigint), _col3 (type: string)","1":"_col0 (type: bigint), _col1 (type: string)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["cint","cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"cint (type: int), cstring1 (type: string)","columnExprMap:":{"_col0":"cint","_col1":"cstring1"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: int), _col2 (type: string)","1":"_col0 (type: int), _col1 (type: string)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["cint","cbigint","cstring1","cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"cint (type: int), cbigint (type: bigint), cstring1 (type: string), cstring2 (type: string)","columnExprMap:":{"_col0":"cint","_col1":"cbigint","_col2":"cstring1","_col3":"cstring2"},"outputColumnNames:":["_col0","_col1","_col2","_col3"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[2, 3, 6, 7]"},"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col2":"0:_col2"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: bigint), _col3 (type: string)","1":"_col0 (type: bigint), _col1 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 3:bigint","col 7:string"],"bigTableValueExpressions:":["col 2:int","col 6:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS 
false"]},"outputColumnNames:":["_col0","_col2"],"Statistics:":"Num rows: 22 Data size: 4840 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: int), _col2 (type: string)","1":"_col0 (type: int), _col1 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:int","col 1:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 24 Data size: 5324 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[2, 3, 6, 7]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output 
format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} +{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cbigint","cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cbigint (type: bigint), cstring2 (type: string)","columnExprMap:":{"_col0":"cbigint","_col1":"cstring2"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: bigint), _col3 (type: string)","1":"_col0 (type: bigint), _col1 (type: string)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["cint","cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"cint (type: int), cstring1 (type: string)","columnExprMap:":{"_col0":"cint","_col1":"cstring1"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: int), _col2 (type: string)","1":"_col0 (type: int), _col1 (type: string)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["cint","cbigint","cstring1","cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"cint (type: int), cbigint (type: bigint), cstring1 (type: string), cstring2 (type: string)","columnExprMap:":{"_col0":"cint","_col1":"cbigint","_col2":"cstring1","_col3":"cstring2"},"outputColumnNames:":["_col0","_col1","_col2","_col3"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[2, 3, 6, 7]"},"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col2":"0:_col2"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: bigint), _col3 (type: 
string)","1":"_col0 (type: bigint), _col1 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 3:bigint","col 7:string"],"bigTableValueExpressions:":["col 2:int","col 6:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col2"],"Statistics:":"Num rows: 22 Data size: 4840 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: int), _col2 (type: string)","1":"_col0 (type: int), _col1 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:int","col 1:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 24 Data size: 5324 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":"[org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]","allNative:":"false","usesVectorUDFAdaptor:":"false","vectorPartitionDescs:":"[(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[2, 3, 6, 7]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce 
Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} PREHOOK: query: select count(*) from (select c.cstring1 from small_alltypesorc_a c left outer join small_alltypesorc_a cd diff --git ql/src/test/results/clientpositive/vector_outer_join4.q.out ql/src/test/results/clientpositive/vector_outer_join4.q.out index 3792f3b..aef1e38 100644 --- ql/src/test/results/clientpositive/vector_outer_join4.q.out +++ ql/src/test/results/clientpositive/vector_outer_join4.q.out @@ -258,7 +258,7 @@ from small_alltypesorc_b c left outer join small_alltypesorc_b cd on cd.cint = c.cint POSTHOOK: type: QUERY -{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-4":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-4"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-4":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["ctinyint","csmallint","cint","cbigint","cfloat","cdouble","cstring1","cstring2","ctimestamp1","ctimestamp2","cboolean1","cboolean2"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cfloat (type: float), cdouble (type: double), cstring1 (type: string), cstring2 (type: string), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), cboolean1 (type: boolean), cboolean2 (type: boolean)","columnExprMap:":{"_col0":"ctinyint","_col1":"csmallint","_col10":"cboolean1","_col11":"cboolean2","_col2":"cint","_col3":"cbigint","_col4":"cfloat","_col5":"cdouble","_col6":"cstring1","_col7":"cstring2","_col8":"ctimestamp1","_col9":"ctimestamp2"},"outputColumnNames:":["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11"],"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col2 (type: int)","1":"_col2 (type: int)"},"OperatorId:":"HASHTABLESINK_10"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["ctinyint","csmallint","cint","cbigint","cfloat","cdouble","cstring1","cstring2","ctimestamp1","ctimestamp2","cboolean1","cboolean2"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: 
NONE","table:":"small_alltypesorc_b","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cfloat (type: float), cdouble (type: double), cstring1 (type: string), cstring2 (type: string), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), cboolean1 (type: boolean), cboolean2 (type: boolean)","columnExprMap:":{"_col0":"ctinyint","_col1":"csmallint","_col10":"cboolean1","_col11":"cboolean2","_col2":"cint","_col3":"cbigint","_col4":"cfloat","_col5":"cdouble","_col6":"cstring1","_col7":"cstring2","_col8":"ctimestamp1","_col9":"ctimestamp2"},"outputColumnNames:":["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]"},"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_12","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col1":"0:_col1","_col10":"0:_col10","_col11":"0:_col11","_col12":"1:_col0","_col13":"1:_col1","_col14":"1:_col2","_col15":"1:_col3","_col16":"1:_col4","_col17":"1:_col5","_col18":"1:_col6","_col19":"1:_col7","_col2":"0:_col2","_col20":"1:_col8","_col21":"1:_col9","_col22":"1:_col10","_col23":"1:_col11","_col3":"0:_col3","_col4":"0:_col4","_col5":"0:_col5","_col6":"0:_col6","_col7":"0:_col7","_col8":"0:_col8","_col9":"0:_col9"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col2 (type: int)","1":"_col2 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 2:int"],"bigTableValueExpressions:":["col 0:tinyint","col 1:smallint","col 2:int","col 3:bigint","col 4:float","col 5:double","col 6:string","col 7:string","col 8:timestamp","col 9:timestamp","col 10:boolean","col 11:boolean"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col20","_col21","_col22","_col23"],"Statistics:":"Num rows: 33 Data size: 7348 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_13","children":{"File Output Operator":{"compressed:":"false","File Sink Vectorization:":{"className:":"VectorFileSinkOperator","native:":"false"},"Statistics:":"Num rows: 33 Data size: 7348 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_14"}}}}}}}}],"Execution mode:":"vectorized","Map 
Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[bigint, bigint, bigint, bigint, double, double, string, string, timestamp, timestamp, bigint, bigint]"}},"Local Work:":{"Map Reduce Local Work":{}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_15"}}}}}} +{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-4":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-4"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-4":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["ctinyint","csmallint","cint","cbigint","cfloat","cdouble","cstring1","cstring2","ctimestamp1","ctimestamp2","cboolean1","cboolean2"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cfloat (type: float), cdouble (type: double), cstring1 (type: string), cstring2 (type: string), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), cboolean1 (type: boolean), cboolean2 (type: boolean)","columnExprMap:":{"_col0":"ctinyint","_col1":"csmallint","_col10":"cboolean1","_col11":"cboolean2","_col2":"cint","_col3":"cbigint","_col4":"cfloat","_col5":"cdouble","_col6":"cstring1","_col7":"cstring2","_col8":"ctimestamp1","_col9":"ctimestamp2"},"outputColumnNames:":["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11"],"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col2 (type: int)","1":"_col2 (type: int)"},"OperatorId:":"HASHTABLESINK_10"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["ctinyint","csmallint","cint","cbigint","cfloat","cdouble","cstring1","cstring2","ctimestamp1","ctimestamp2","cboolean1","cboolean2"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint), 
csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cfloat (type: float), cdouble (type: double), cstring1 (type: string), cstring2 (type: string), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), cboolean1 (type: boolean), cboolean2 (type: boolean)","columnExprMap:":{"_col0":"ctinyint","_col1":"csmallint","_col10":"cboolean1","_col11":"cboolean2","_col2":"cint","_col3":"cbigint","_col4":"cfloat","_col5":"cdouble","_col6":"cstring1","_col7":"cstring2","_col8":"ctimestamp1","_col9":"ctimestamp2"},"outputColumnNames:":["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]"},"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_12","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col1":"0:_col1","_col10":"0:_col10","_col11":"0:_col11","_col12":"1:_col0","_col13":"1:_col1","_col14":"1:_col2","_col15":"1:_col3","_col16":"1:_col4","_col17":"1:_col5","_col18":"1:_col6","_col19":"1:_col7","_col2":"0:_col2","_col20":"1:_col8","_col21":"1:_col9","_col22":"1:_col10","_col23":"1:_col11","_col3":"0:_col3","_col4":"0:_col4","_col5":"0:_col5","_col6":"0:_col6","_col7":"0:_col7","_col8":"0:_col8","_col9":"0:_col9"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col2 (type: int)","1":"_col2 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 2:int"],"bigTableValueExpressions:":["col 0:tinyint","col 1:smallint","col 2:int","col 3:bigint","col 4:float","col 5:double","col 6:string","col 7:string","col 8:timestamp","col 9:timestamp","col 10:boolean","col 11:boolean"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col20","_col21","_col22","_col23"],"Statistics:":"Num rows: 33 Data size: 7348 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_13","children":{"File Output Operator":{"compressed:":"false","File Sink Vectorization:":{"className:":"VectorFileSinkOperator","native:":"false"},"Statistics:":"Num rows: 33 Data size: 7348 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_14"}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":"[org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]","allNative:":"false","usesVectorUDFAdaptor:":"false","vectorPartitionDescs:":"[(VECTORIZED_INPUT_FILE_FORMAT, 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[bigint, bigint, bigint, bigint, double, double, string, string, timestamp, timestamp, bigint, bigint]"}},"Local Work:":{"Map Reduce Local Work":{}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_15"}}}}}} PREHOOK: query: select * from small_alltypesorc_b c left outer join small_alltypesorc_b cd @@ -339,7 +339,7 @@ from small_alltypesorc_b c left outer join small_alltypesorc_b hd on hd.ctinyint = c.ctinyint POSTHOOK: type: QUERY -{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-4":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-4"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-4":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:hd":{"TableScan":{"alias:":"hd","columns:":["ctinyint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint)","columnExprMap:":{"_col0":"ctinyint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: tinyint)","1":"_col0 (type: tinyint)"},"OperatorId:":"HASHTABLESINK_10"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["ctinyint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint)","columnExprMap:":{"_col0":"ctinyint"},"outputColumnNames:":["_col0"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0]"},"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_12","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: tinyint)","1":"_col0 (type: tinyint)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:tinyint"],"bigTableValueExpressions:":["col 0:tinyint"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table 
vectorizes IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 33 Data size: 7348 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_13","children":{"File Output Operator":{"compressed:":"false","File Sink Vectorization:":{"className:":"VectorFileSinkOperator","native:":"false"},"Statistics:":"Num rows: 33 Data size: 7348 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_14"}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[0]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_15"}}}}}} +{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-4":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-4"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-4":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:hd":{"TableScan":{"alias:":"hd","columns:":["ctinyint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint)","columnExprMap:":{"_col0":"ctinyint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: tinyint)","1":"_col0 (type: tinyint)"},"OperatorId:":"HASHTABLESINK_10"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["ctinyint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint)","columnExprMap:":{"_col0":"ctinyint"},"outputColumnNames:":["_col0"],"Select 
Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0]"},"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_12","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: tinyint)","1":"_col0 (type: tinyint)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:tinyint"],"bigTableValueExpressions:":["col 0:tinyint"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 33 Data size: 7348 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_13","children":{"File Output Operator":{"compressed:":"false","File Sink Vectorization:":{"className:":"VectorFileSinkOperator","native:":"false"},"Statistics:":"Num rows: 33 Data size: 7348 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_14"}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":"[org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]","allNative:":"false","usesVectorUDFAdaptor:":"false","vectorPartitionDescs:":"[(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[0]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_15"}}}}}} PREHOOK: query: select c.ctinyint from small_alltypesorc_b c left outer join small_alltypesorc_b hd @@ -782,7 +782,7 @@ left outer join small_alltypesorc_b hd on hd.ctinyint = c.ctinyint ) t1 POSTHOOK: type: QUERY -{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cint (type: 
int)","columnExprMap:":{"_col0":"cint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: int)","1":"_col0 (type: int)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["ctinyint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint)","columnExprMap:":{"_col0":"ctinyint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: tinyint)","1":"_col0 (type: tinyint)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["ctinyint","cint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint), cint (type: int)","columnExprMap:":{"_col0":"ctinyint","_col1":"cint"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 2]"},"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: int)","1":"_col0 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 2:int"],"bigTableValueExpressions:":["col 0:tinyint"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 33 Data size: 7348 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: tinyint)","1":"_col0 (type: tinyint)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:tinyint"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 36 Data size: 8082 Basic stats: COMPLETE Column stats: 
NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[0, 2]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} +{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select 
Operator":{"expressions:":"cint (type: int)","columnExprMap:":{"_col0":"cint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: int)","1":"_col0 (type: int)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["ctinyint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint)","columnExprMap:":{"_col0":"ctinyint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: tinyint)","1":"_col0 (type: tinyint)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["ctinyint","cint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint), cint (type: int)","columnExprMap:":{"_col0":"ctinyint","_col1":"cint"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 2]"},"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: int)","1":"_col0 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 2:int"],"bigTableValueExpressions:":["col 0:tinyint"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 33 Data size: 7348 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: tinyint)","1":"_col0 (type: tinyint)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:tinyint"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 36 Data size: 8082 Basic stats: COMPLETE 
Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":"[org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]","allNative:":"false","usesVectorUDFAdaptor:":"false","vectorPartitionDescs:":"[(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[0, 2]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} PREHOOK: query: select count(*) from (select c.ctinyint from small_alltypesorc_b c left outer join small_alltypesorc_b cd diff --git ql/src/test/results/clientpositive/vector_outer_join6.q.out ql/src/test/results/clientpositive/vector_outer_join6.q.out index 5bd2bac..bbe6c27 100644 --- ql/src/test/results/clientpositive/vector_outer_join6.q.out +++ ql/src/test/results/clientpositive/vector_outer_join6.q.out @@ -130,7 +130,7 @@ POSTHOOK: query: explain vectorization detail formatted select tj1rnum, tj2rnum, tjoin3.rnum as rnumt3 from (select tjoin1.rnum tj1rnum, tjoin2.rnum tj2rnum, tjoin2.c1 tj2c1 from 
tjoin1 left outer join tjoin2 on tjoin1.c1 = tjoin2.c1 ) tj left outer join tjoin3 on tj2c1 = tjoin3.c1 POSTHOOK: type: QUERY -{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-7":{"ROOT STAGE":"TRUE"},"Stage-5":{"DEPENDENT STAGES":"Stage-7"},"Stage-0":{"DEPENDENT STAGES":"Stage-5"}},"STAGE PLANS":{"Stage-7":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_0:$hdt$_0:tjoin2":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_1:tjoin3":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_0:$hdt$_0:tjoin2":{"TableScan":{"alias:":"tjoin2","columns:":["rnum","c1"],"database:":"default","Statistics:":"Num rows: 4 Data size: 372 Basic stats: COMPLETE Column stats: NONE","table:":"tjoin2","isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"rnum (type: int), c1 (type: int)","columnExprMap:":{"_col0":"rnum","_col1":"c1"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 4 Data size: 372 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_1","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: int)","1":"_col1 (type: int)"},"OperatorId:":"HASHTABLESINK_21"}}}}}},"$hdt$_1:tjoin3":{"TableScan":{"alias:":"tjoin3","columns:":["rnum","c1"],"database:":"default","Statistics:":"Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE","table:":"tjoin3","isTempTable:":"false","OperatorId:":"TS_8","children":{"Select Operator":{"expressions:":"rnum (type: int), c1 (type: int)","columnExprMap:":{"_col0":"rnum","_col1":"c1"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_9","children":{"HashTable Sink Operator":{"keys:":{"0":"_col2 (type: int)","1":"_col1 (type: int)"},"OperatorId:":"HASHTABLESINK_19"}}}}}}}}},"Stage-5":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"tjoin1","columns:":["rnum","c1"],"database:":"default","Statistics:":"Num rows: 3 Data size: 32 Basic stats: COMPLETE Column stats: NONE","table:":"tjoin1","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:rnum:int, 1:c1:int, 2:c2:int, 3:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"rnum (type: int), c1 (type: int)","columnExprMap:":{"_col0":"rnum","_col1":"c1"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 1]"},"Statistics:":"Num rows: 3 Data size: 32 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_23","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col1":"0:_col1","_col2":"1:_col0"},"condition map:":[{"":"Right Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: int)","1":"_col1 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 1:int"],"bigTableValueExpressions:":["col 0:int"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col1","_col2"],"Statistics:":"Num rows: 4 Data size: 409 Basic stats: COMPLETE Column stats: 
NONE","OperatorId:":"MAPJOIN_24","children":{"Select Operator":{"expressions:":"_col2 (type: int), _col0 (type: int), _col1 (type: int)","columnExprMap:":{"_col0":"_col2","_col1":"_col0","_col2":"_col1"},"outputColumnNames:":["_col0","_col1","_col2"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[2, 0, 1]"},"Statistics:":"Num rows: 4 Data size: 409 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_25","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col1":"0:_col1","_col3":"1:_col0"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col2 (type: int)","1":"_col1 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 1:int"],"bigTableValueExpressions:":["col 2:int","col 0:int"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col1","_col3"],"Statistics:":"Num rows: 4 Data size: 449 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_26","children":{"Select Operator":{"expressions:":"_col0 (type: int), _col1 (type: int), _col3 (type: int)","columnExprMap:":{"_col0":"_col0","_col1":"_col1","_col2":"_col3"},"outputColumnNames:":["_col0","_col1","_col2"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 1, 2]"},"Statistics:":"Num rows: 4 Data size: 449 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_27","children":{"File Output Operator":{"compressed:":"false","File Sink Vectorization:":{"className:":"VectorFileSinkOperator","native:":"false"},"Statistics:":"Num rows: 4 Data size: 449 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_28"}}}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"3","includeColumns:":"[0, 1]","dataColumns:":["rnum:int","c1:int","c2:int"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[bigint, bigint]"}},"Local Work:":{"Map Reduce Local Work":{}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_29"}}}}}} +{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-7":{"ROOT STAGE":"TRUE"},"Stage-5":{"DEPENDENT STAGES":"Stage-7"},"Stage-0":{"DEPENDENT STAGES":"Stage-5"}},"STAGE PLANS":{"Stage-7":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_0:$hdt$_0:tjoin2":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_1:tjoin3":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator 
Tree:":{"$hdt$_0:$hdt$_0:tjoin2":{"TableScan":{"alias:":"tjoin2","columns:":["rnum","c1"],"database:":"default","Statistics:":"Num rows: 4 Data size: 372 Basic stats: COMPLETE Column stats: NONE","table:":"tjoin2","isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"rnum (type: int), c1 (type: int)","columnExprMap:":{"_col0":"rnum","_col1":"c1"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 4 Data size: 372 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_1","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: int)","1":"_col1 (type: int)"},"OperatorId:":"HASHTABLESINK_21"}}}}}},"$hdt$_1:tjoin3":{"TableScan":{"alias:":"tjoin3","columns:":["rnum","c1"],"database:":"default","Statistics:":"Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE","table:":"tjoin3","isTempTable:":"false","OperatorId:":"TS_8","children":{"Select Operator":{"expressions:":"rnum (type: int), c1 (type: int)","columnExprMap:":{"_col0":"rnum","_col1":"c1"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_9","children":{"HashTable Sink Operator":{"keys:":{"0":"_col2 (type: int)","1":"_col1 (type: int)"},"OperatorId:":"HASHTABLESINK_19"}}}}}}}}},"Stage-5":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"tjoin1","columns:":["rnum","c1"],"database:":"default","Statistics:":"Num rows: 3 Data size: 32 Basic stats: COMPLETE Column stats: NONE","table:":"tjoin1","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:rnum:int, 1:c1:int, 2:c2:int, 3:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"rnum (type: int), c1 (type: int)","columnExprMap:":{"_col0":"rnum","_col1":"c1"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 1]"},"Statistics:":"Num rows: 3 Data size: 32 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_23","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col1":"0:_col1","_col2":"1:_col0"},"condition map:":[{"":"Right Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: int)","1":"_col1 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 1:int"],"bigTableValueExpressions:":["col 0:int"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col1","_col2"],"Statistics:":"Num rows: 4 Data size: 409 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_24","children":{"Select Operator":{"expressions:":"_col2 (type: int), _col0 (type: int), _col1 (type: int)","columnExprMap:":{"_col0":"_col2","_col1":"_col0","_col2":"_col1"},"outputColumnNames:":["_col0","_col1","_col2"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[2, 0, 1]"},"Statistics:":"Num rows: 4 Data size: 409 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_25","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col1":"0:_col1","_col3":"1:_col0"},"condition 
map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col2 (type: int)","1":"_col1 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 1:int"],"bigTableValueExpressions:":["col 2:int","col 0:int"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col1","_col3"],"Statistics:":"Num rows: 4 Data size: 449 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_26","children":{"Select Operator":{"expressions:":"_col0 (type: int), _col1 (type: int), _col3 (type: int)","columnExprMap:":{"_col0":"_col0","_col1":"_col1","_col2":"_col3"},"outputColumnNames:":["_col0","_col1","_col2"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 1, 2]"},"Statistics:":"Num rows: 4 Data size: 449 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_27","children":{"File Output Operator":{"compressed:":"false","File Sink Vectorization:":{"className:":"VectorFileSinkOperator","native:":"false"},"Statistics:":"Num rows: 4 Data size: 449 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_28"}}}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":"[org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]","allNative:":"false","usesVectorUDFAdaptor:":"false","vectorPartitionDescs:":"[(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"3","includeColumns:":"[0, 1]","dataColumns:":["rnum:int","c1:int","c2:int"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[bigint, bigint]"}},"Local Work:":{"Map Reduce Local Work":{}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_29"}}}}}} PREHOOK: query: select tj1rnum, tj2rnum, tjoin3.rnum as rnumt3 from (select tjoin1.rnum tj1rnum, tjoin2.rnum tj2rnum, tjoin2.c1 tj2c1 from tjoin1 left outer join tjoin2 on tjoin1.c1 = tjoin2.c1 ) tj left outer join tjoin3 on tj2c1 = tjoin3.c1 PREHOOK: type: QUERY @@ -157,7 +157,7 @@ POSTHOOK: query: explain vectorization detail formatted select tj1rnum, tj2rnum as rnumt3 from (select tjoin1.rnum tj1rnum, tjoin2.rnum tj2rnum, tjoin2.c1 tj2c1 from tjoin1 left outer join tjoin2 on tjoin1.c1 = tjoin2.c1 ) tj left outer join tjoin3 on tj2c1 = tjoin3.c1 POSTHOOK: type: QUERY -{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-7":{"ROOT STAGE":"TRUE"},"Stage-5":{"DEPENDENT STAGES":"Stage-7"},"Stage-0":{"DEPENDENT STAGES":"Stage-5"}},"STAGE PLANS":{"Stage-7":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_0:$hdt$_0:tjoin2":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_1:tjoin3":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local 
Operator Tree:":{"$hdt$_0:$hdt$_0:tjoin2":{"TableScan":{"alias:":"tjoin2","columns:":["rnum","c1"],"database:":"default","Statistics:":"Num rows: 4 Data size: 372 Basic stats: COMPLETE Column stats: NONE","table:":"tjoin2","isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"rnum (type: int), c1 (type: int)","columnExprMap:":{"_col0":"rnum","_col1":"c1"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 4 Data size: 372 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_1","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: int)","1":"_col1 (type: int)"},"OperatorId:":"HASHTABLESINK_21"}}}}}},"$hdt$_1:tjoin3":{"TableScan":{"alias:":"tjoin3","columns:":["c1"],"database:":"default","Statistics:":"Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE","table:":"tjoin3","isTempTable:":"false","OperatorId:":"TS_8","children":{"Select Operator":{"expressions:":"c1 (type: int)","columnExprMap:":{"_col0":"c1"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_9","children":{"HashTable Sink Operator":{"keys:":{"0":"_col2 (type: int)","1":"_col0 (type: int)"},"OperatorId:":"HASHTABLESINK_19"}}}}}}}}},"Stage-5":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"tjoin1","columns:":["rnum","c1"],"database:":"default","Statistics:":"Num rows: 3 Data size: 32 Basic stats: COMPLETE Column stats: NONE","table:":"tjoin1","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:rnum:int, 1:c1:int, 2:c2:int, 3:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"rnum (type: int), c1 (type: int)","columnExprMap:":{"_col0":"rnum","_col1":"c1"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 1]"},"Statistics:":"Num rows: 3 Data size: 32 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_23","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col1":"0:_col1","_col2":"1:_col0"},"condition map:":[{"":"Right Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: int)","1":"_col1 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 1:int"],"bigTableValueExpressions:":["col 0:int"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col1","_col2"],"Statistics:":"Num rows: 4 Data size: 409 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_24","children":{"Select Operator":{"expressions:":"_col2 (type: int), _col0 (type: int), _col1 (type: int)","columnExprMap:":{"_col0":"_col2","_col1":"_col0","_col2":"_col1"},"outputColumnNames:":["_col0","_col1","_col2"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[2, 0, 1]"},"Statistics:":"Num rows: 4 Data size: 409 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_25","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col1":"0:_col1"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col2 
(type: int)","1":"_col0 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 1:int"],"bigTableValueExpressions:":["col 2:int","col 0:int"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 4 Data size: 449 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_26","children":{"File Output Operator":{"compressed:":"false","File Sink Vectorization:":{"className:":"VectorFileSinkOperator","native:":"false"},"Statistics:":"Num rows: 4 Data size: 449 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_27"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"3","includeColumns:":"[0, 1]","dataColumns:":["rnum:int","c1:int","c2:int"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[bigint, bigint]"}},"Local Work:":{"Map Reduce Local Work":{}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_28"}}}}}} +{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-7":{"ROOT STAGE":"TRUE"},"Stage-5":{"DEPENDENT STAGES":"Stage-7"},"Stage-0":{"DEPENDENT STAGES":"Stage-5"}},"STAGE PLANS":{"Stage-7":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_0:$hdt$_0:tjoin2":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_1:tjoin3":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_0:$hdt$_0:tjoin2":{"TableScan":{"alias:":"tjoin2","columns:":["rnum","c1"],"database:":"default","Statistics:":"Num rows: 4 Data size: 372 Basic stats: COMPLETE Column stats: NONE","table:":"tjoin2","isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"rnum (type: int), c1 (type: int)","columnExprMap:":{"_col0":"rnum","_col1":"c1"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 4 Data size: 372 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_1","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: int)","1":"_col1 (type: int)"},"OperatorId:":"HASHTABLESINK_21"}}}}}},"$hdt$_1:tjoin3":{"TableScan":{"alias:":"tjoin3","columns:":["c1"],"database:":"default","Statistics:":"Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE","table:":"tjoin3","isTempTable:":"false","OperatorId:":"TS_8","children":{"Select Operator":{"expressions:":"c1 (type: int)","columnExprMap:":{"_col0":"c1"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_9","children":{"HashTable Sink Operator":{"keys:":{"0":"_col2 
(type: int)","1":"_col0 (type: int)"},"OperatorId:":"HASHTABLESINK_19"}}}}}}}}},"Stage-5":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"tjoin1","columns:":["rnum","c1"],"database:":"default","Statistics:":"Num rows: 3 Data size: 32 Basic stats: COMPLETE Column stats: NONE","table:":"tjoin1","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:rnum:int, 1:c1:int, 2:c2:int, 3:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"rnum (type: int), c1 (type: int)","columnExprMap:":{"_col0":"rnum","_col1":"c1"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 1]"},"Statistics:":"Num rows: 3 Data size: 32 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_23","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col1":"0:_col1","_col2":"1:_col0"},"condition map:":[{"":"Right Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: int)","1":"_col1 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 1:int"],"bigTableValueExpressions:":["col 0:int"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col1","_col2"],"Statistics:":"Num rows: 4 Data size: 409 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_24","children":{"Select Operator":{"expressions:":"_col2 (type: int), _col0 (type: int), _col1 (type: int)","columnExprMap:":{"_col0":"_col2","_col1":"_col0","_col2":"_col1"},"outputColumnNames:":["_col0","_col1","_col2"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[2, 0, 1]"},"Statistics:":"Num rows: 4 Data size: 409 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_25","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col1":"0:_col1"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col2 (type: int)","1":"_col0 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 1:int"],"bigTableValueExpressions:":["col 2:int","col 0:int"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 4 Data size: 449 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_26","children":{"File Output Operator":{"compressed:":"false","File Sink Vectorization:":{"className:":"VectorFileSinkOperator","native:":"false"},"Statistics:":"Num rows: 4 Data size: 449 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_27"}}}}}}}}}}}}],"Execution 
mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":"[org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]","allNative:":"false","usesVectorUDFAdaptor:":"false","vectorPartitionDescs:":"[(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"3","includeColumns:":"[0, 1]","dataColumns:":["rnum:int","c1:int","c2:int"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[bigint, bigint]"}},"Local Work:":{"Map Reduce Local Work":{}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_28"}}}}}} PREHOOK: query: select tj1rnum, tj2rnum as rnumt3 from (select tjoin1.rnum tj1rnum, tjoin2.rnum tj2rnum, tjoin2.c1 tj2c1 from tjoin1 left outer join tjoin2 on tjoin1.c1 = tjoin2.c1 ) tj left outer join tjoin3 on tj2c1 = tjoin3.c1 PREHOOK: type: QUERY diff --git ql/src/test/results/clientpositive/vector_outer_reference_windowed.q.out ql/src/test/results/clientpositive/vector_outer_reference_windowed.q.out index fe68e5c..e7162e2 100644 --- ql/src/test/results/clientpositive/vector_outer_reference_windowed.q.out +++ ql/src/test/results/clientpositive/vector_outer_reference_windowed.q.out @@ -301,9 +301,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -352,9 +353,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.SequenceFileInputFormat, LAZY_BINARY)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -490,9 +492,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -542,9 +545,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.SequenceFileInputFormat, LAZY_BINARY)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -731,9 +735,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true 
inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.SequenceFileInputFormat, LAZY_BINARY)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -783,9 +788,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.SequenceFileInputFormat, LAZY_BINARY)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -976,9 +982,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.SequenceFileInputFormat, LAZY_BINARY)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -1028,9 +1035,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.SequenceFileInputFormat, LAZY_BINARY)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -1221,9 +1229,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.SequenceFileInputFormat, LAZY_BINARY)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -1368,9 +1377,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1419,9 +1429,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.SequenceFileInputFormat, LAZY_BINARY)] vectorized: true rowBatchContext: dataColumnCount: 1 @@ -1557,9 +1568,10 @@ STAGE PLANS: enabledConditionsMet: 
hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 @@ -1609,9 +1621,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.SequenceFileInputFormat, LAZY_BINARY)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -1798,9 +1811,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.SequenceFileInputFormat, LAZY_BINARY)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -1850,9 +1864,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.SequenceFileInputFormat, LAZY_BINARY)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -2043,9 +2058,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.SequenceFileInputFormat, LAZY_BINARY)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -2095,9 +2111,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.SequenceFileInputFormat, LAZY_BINARY)] vectorized: true rowBatchContext: dataColumnCount: 3 @@ -2288,9 +2305,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.SequenceFileInputFormat, LAZY_BINARY)] vectorized: true rowBatchContext: dataColumnCount: 3 diff --git 
ql/src/test/results/clientpositive/vector_reduce1.q.out ql/src/test/results/clientpositive/vector_reduce1.q.out index 173090b..b28e35c 100644 --- ql/src/test/results/clientpositive/vector_reduce1.q.out +++ ql/src/test/results/clientpositive/vector_reduce1.q.out @@ -147,7 +147,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/vector_reduce2.q.out ql/src/test/results/clientpositive/vector_reduce2.q.out index 83a858a..d634f4d 100644 --- ql/src/test/results/clientpositive/vector_reduce2.q.out +++ ql/src/test/results/clientpositive/vector_reduce2.q.out @@ -147,7 +147,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/vector_reduce3.q.out ql/src/test/results/clientpositive/vector_reduce3.q.out index d684ddf..829193a 100644 --- ql/src/test/results/clientpositive/vector_reduce3.q.out +++ ql/src/test/results/clientpositive/vector_reduce3.q.out @@ -147,7 +147,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/vector_reduce_groupby_decimal.q.out ql/src/test/results/clientpositive/vector_reduce_groupby_decimal.q.out index b46501e..511fad8 100644 --- ql/src/test/results/clientpositive/vector_reduce_groupby_decimal.q.out +++ ql/src/test/results/clientpositive/vector_reduce_groupby_decimal.q.out @@ -85,7 +85,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -130,7 +130,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/vector_reduce_groupby_duplicate_cols.q.out ql/src/test/results/clientpositive/vector_reduce_groupby_duplicate_cols.q.out index 7ccbf8f..e7f5542 100644 --- ql/src/test/results/clientpositive/vector_reduce_groupby_duplicate_cols.q.out +++ ql/src/test/results/clientpositive/vector_reduce_groupby_duplicate_cols.q.out @@ -120,9 +120,10 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] - inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + 
inputFileFormats: [org.apache.hadoop.mapred.TextInputFormat] allNative: false usesVectorUDFAdaptor: false + vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.TextInputFormat, LAZY_SIMPLE)] vectorized: true rowBatchContext: dataColumnCount: 2 diff --git ql/src/test/results/clientpositive/vector_string_concat.q.out ql/src/test/results/clientpositive/vector_string_concat.q.out index ff3b755..7cde63d 100644 --- ql/src/test/results/clientpositive/vector_string_concat.q.out +++ ql/src/test/results/clientpositive/vector_string_concat.q.out @@ -153,7 +153,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -371,7 +371,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -414,7 +414,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/vector_string_decimal.q.out ql/src/test/results/clientpositive/vector_string_decimal.q.out index 59b0588..21e1e33 100644 --- ql/src/test/results/clientpositive/vector_string_decimal.q.out +++ ql/src/test/results/clientpositive/vector_string_decimal.q.out @@ -94,7 +94,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true diff --git ql/src/test/results/clientpositive/vector_struct_in.q.out ql/src/test/results/clientpositive/vector_struct_in.q.out index 773908b..8c5e7fe 100644 --- ql/src/test/results/clientpositive/vector_struct_in.q.out +++ ql/src/test/results/clientpositive/vector_struct_in.q.out @@ -92,7 +92,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -205,7 +205,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] allNative: false usesVectorUDFAdaptor: false vectorized: true @@ -342,7 +342,7 @@ STAGE PLANS: enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true inputFormatFeatureSupport: [] featureSupportInUse: [] - inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + inputFileFormats: 
[org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -455,7 +455,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -592,7 +592,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -705,7 +705,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -845,7 +845,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -961,7 +961,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/vector_tablesample_rows.q.out ql/src/test/results/clientpositive/vector_tablesample_rows.q.out
index 3280236..37feaf3 100644
--- ql/src/test/results/clientpositive/vector_tablesample_rows.q.out
+++ ql/src/test/results/clientpositive/vector_tablesample_rows.q.out
@@ -49,9 +49,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 12
@@ -144,9 +145,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 12
diff --git ql/src/test/results/clientpositive/vector_udf2.q.out ql/src/test/results/clientpositive/vector_udf2.q.out
index 222a901..4e89622 100644
--- ql/src/test/results/clientpositive/vector_udf2.q.out
+++ ql/src/test/results/clientpositive/vector_udf2.q.out
@@ -92,7 +92,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/vector_udf3.q.out ql/src/test/results/clientpositive/vector_udf3.q.out
index a10b07e..3334f3d 100644
--- ql/src/test/results/clientpositive/vector_udf3.q.out
+++ ql/src/test/results/clientpositive/vector_udf3.q.out
@@ -50,7 +50,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/vector_varchar_4.q.out ql/src/test/results/clientpositive/vector_varchar_4.q.out
index 00a82c3..eaae23e 100644
--- ql/src/test/results/clientpositive/vector_varchar_4.q.out
+++ ql/src/test/results/clientpositive/vector_varchar_4.q.out
@@ -176,7 +176,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/vector_varchar_mapjoin1.q.out ql/src/test/results/clientpositive/vector_varchar_mapjoin1.q.out
index 282aec4..4cece8b 100644
--- ql/src/test/results/clientpositive/vector_varchar_mapjoin1.q.out
+++ ql/src/test/results/clientpositive/vector_varchar_mapjoin1.q.out
@@ -193,7 +193,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -302,7 +302,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -413,7 +413,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/vector_varchar_simple.q.out ql/src/test/results/clientpositive/vector_varchar_simple.q.out
index 0a144d2..d395dc1 100644
--- ql/src/test/results/clientpositive/vector_varchar_simple.q.out
+++ ql/src/test/results/clientpositive/vector_varchar_simple.q.out
@@ -86,7 +86,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -195,7 +195,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -318,7 +318,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/vector_when_case_null.q.out ql/src/test/results/clientpositive/vector_when_case_null.q.out
index d7cc0b7..75c1fe3 100644
--- ql/src/test/results/clientpositive/vector_when_case_null.q.out
+++ ql/src/test/results/clientpositive/vector_when_case_null.q.out
@@ -79,7 +79,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/vectorization_1.q.out ql/src/test/results/clientpositive/vectorization_1.q.out
index e72321b..1a9bacb 100644
--- ql/src/test/results/clientpositive/vectorization_1.q.out
+++ ql/src/test/results/clientpositive/vectorization_1.q.out
@@ -100,9 +100,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 12
diff --git ql/src/test/results/clientpositive/vectorization_10.q.out ql/src/test/results/clientpositive/vectorization_10.q.out
index 325d1a7..3616692 100644
--- ql/src/test/results/clientpositive/vectorization_10.q.out
+++ ql/src/test/results/clientpositive/vectorization_10.q.out
@@ -96,9 +96,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 12
diff --git ql/src/test/results/clientpositive/vectorization_11.q.out ql/src/test/results/clientpositive/vectorization_11.q.out
index 027d718..b584fec 100644
--- ql/src/test/results/clientpositive/vectorization_11.q.out
+++ ql/src/test/results/clientpositive/vectorization_11.q.out
@@ -78,9 +78,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 12
diff --git ql/src/test/results/clientpositive/vectorization_12.q.out ql/src/test/results/clientpositive/vectorization_12.q.out
index fb0a065..0a61b44 100644
--- ql/src/test/results/clientpositive/vectorization_12.q.out
+++ ql/src/test/results/clientpositive/vectorization_12.q.out
@@ -127,9 +127,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 12
@@ -182,9 +183,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.SequenceFileInputFormat, LAZY_BINARY)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 19
diff --git ql/src/test/results/clientpositive/vectorization_13.q.out ql/src/test/results/clientpositive/vectorization_13.q.out
index caad57a..c4953cf 100644
--- ql/src/test/results/clientpositive/vectorization_13.q.out
+++ ql/src/test/results/clientpositive/vectorization_13.q.out
@@ -129,9 +129,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 12
@@ -184,9 +185,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.SequenceFileInputFormat, LAZY_BINARY)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 21
@@ -458,7 +460,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -506,7 +508,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/vectorization_14.q.out ql/src/test/results/clientpositive/vectorization_14.q.out
index 0a0c5fb..92afcb5 100644
--- ql/src/test/results/clientpositive/vectorization_14.q.out
+++ ql/src/test/results/clientpositive/vectorization_14.q.out
@@ -130,9 +130,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 12
@@ -185,9 +186,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.SequenceFileInputFormat, LAZY_BINARY)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 22
diff --git ql/src/test/results/clientpositive/vectorization_15.q.out ql/src/test/results/clientpositive/vectorization_15.q.out
index e020bb8..5b34e9c 100644
--- ql/src/test/results/clientpositive/vectorization_15.q.out
+++ ql/src/test/results/clientpositive/vectorization_15.q.out
@@ -125,9 +125,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 12
@@ -179,9 +180,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.SequenceFileInputFormat, LAZY_BINARY)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 21
diff --git ql/src/test/results/clientpositive/vectorization_16.q.out ql/src/test/results/clientpositive/vectorization_16.q.out
index 69f5982..e1d567a 100644
--- ql/src/test/results/clientpositive/vectorization_16.q.out
+++ ql/src/test/results/clientpositive/vectorization_16.q.out
@@ -102,9 +102,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 12
diff --git ql/src/test/results/clientpositive/vectorization_17.q.out ql/src/test/results/clientpositive/vectorization_17.q.out
index 7104ae5..6288354 100644
--- ql/src/test/results/clientpositive/vectorization_17.q.out
+++ ql/src/test/results/clientpositive/vectorization_17.q.out
@@ -96,9 +96,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 12
diff --git ql/src/test/results/clientpositive/vectorization_2.q.out ql/src/test/results/clientpositive/vectorization_2.q.out
index 6c3b277..cfbfd21 100644
--- ql/src/test/results/clientpositive/vectorization_2.q.out
+++ ql/src/test/results/clientpositive/vectorization_2.q.out
@@ -104,9 +104,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 12
diff --git ql/src/test/results/clientpositive/vectorization_3.q.out ql/src/test/results/clientpositive/vectorization_3.q.out
index 0a0a3b9..e354607 100644
--- ql/src/test/results/clientpositive/vectorization_3.q.out
+++ ql/src/test/results/clientpositive/vectorization_3.q.out
@@ -109,9 +109,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 12
diff --git ql/src/test/results/clientpositive/vectorization_4.q.out ql/src/test/results/clientpositive/vectorization_4.q.out
index ee61869..5eaad00 100644
--- ql/src/test/results/clientpositive/vectorization_4.q.out
+++ ql/src/test/results/clientpositive/vectorization_4.q.out
@@ -104,9 +104,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 12
diff --git ql/src/test/results/clientpositive/vectorization_5.q.out ql/src/test/results/clientpositive/vectorization_5.q.out
index 13fa89b..9935e29 100644
--- ql/src/test/results/clientpositive/vectorization_5.q.out
+++ ql/src/test/results/clientpositive/vectorization_5.q.out
@@ -98,9 +98,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 12
diff --git ql/src/test/results/clientpositive/vectorization_6.q.out ql/src/test/results/clientpositive/vectorization_6.q.out
index f2204fc..dedeee2 100644
--- ql/src/test/results/clientpositive/vectorization_6.q.out
+++ ql/src/test/results/clientpositive/vectorization_6.q.out
@@ -90,9 +90,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 12
diff --git ql/src/test/results/clientpositive/vectorization_7.q.out ql/src/test/results/clientpositive/vectorization_7.q.out
index 51d2b45..557e357 100644
--- ql/src/test/results/clientpositive/vectorization_7.q.out
+++ ql/src/test/results/clientpositive/vectorization_7.q.out
@@ -102,9 +102,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 12
@@ -322,7 +323,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/vectorization_8.q.out ql/src/test/results/clientpositive/vectorization_8.q.out
index 1da5623..112f2d3 100644
--- ql/src/test/results/clientpositive/vectorization_8.q.out
+++ ql/src/test/results/clientpositive/vectorization_8.q.out
@@ -98,9 +98,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 12
@@ -305,7 +306,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/vectorization_9.q.out ql/src/test/results/clientpositive/vectorization_9.q.out
index 69f5982..e1d567a 100644
--- ql/src/test/results/clientpositive/vectorization_9.q.out
+++ ql/src/test/results/clientpositive/vectorization_9.q.out
@@ -102,9 +102,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 12
diff --git ql/src/test/results/clientpositive/vectorization_decimal_date.q.out ql/src/test/results/clientpositive/vectorization_decimal_date.q.out
index 44e4632..dc9d608 100644
--- ql/src/test/results/clientpositive/vectorization_decimal_date.q.out
+++ ql/src/test/results/clientpositive/vectorization_decimal_date.q.out
@@ -70,7 +70,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/vectorization_div0.q.out ql/src/test/results/clientpositive/vectorization_div0.q.out
index 64c05c7..e73e49e 100644
--- ql/src/test/results/clientpositive/vectorization_div0.q.out
+++ ql/src/test/results/clientpositive/vectorization_div0.q.out
@@ -52,7 +52,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -229,7 +229,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -427,7 +427,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -625,7 +625,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/vectorization_limit.q.out ql/src/test/results/clientpositive/vectorization_limit.q.out
index f493191..bf4ef00 100644
--- ql/src/test/results/clientpositive/vectorization_limit.q.out
+++ ql/src/test/results/clientpositive/vectorization_limit.q.out
@@ -41,7 +41,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -124,9 +124,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 12
@@ -253,9 +254,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 12
@@ -380,9 +382,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 12
@@ -508,9 +511,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 12
@@ -665,9 +669,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 12
@@ -716,9 +721,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTOR_DESERIALIZE, org.apache.hadoop.mapred.SequenceFileInputFormat, LAZY_BINARY)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 2
diff --git ql/src/test/results/clientpositive/vectorization_offset_limit.q.out ql/src/test/results/clientpositive/vectorization_offset_limit.q.out
index 0bdbd97..ad414b5 100644
--- ql/src/test/results/clientpositive/vectorization_offset_limit.q.out
+++ ql/src/test/results/clientpositive/vectorization_offset_limit.q.out
@@ -42,7 +42,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -119,7 +119,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/vectorization_parquet_projection.q.out ql/src/test/results/clientpositive/vectorization_parquet_projection.q.out
index ade32ee..50f7003 100644
--- ql/src/test/results/clientpositive/vectorization_parquet_projection.q.out
+++ ql/src/test/results/clientpositive/vectorization_parquet_projection.q.out
@@ -147,7 +147,7 @@ STAGE PLANS:
 Map Vectorization:
 enabled: true
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat]
 notVectorizedReason: Select expression for SELECT operator: Vectorizing complex type MAP not enabled (map) since hive.vectorized.complex.types.enabled IS false
 vectorized: false
@@ -223,7 +223,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -303,7 +303,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -385,7 +385,7 @@ STAGE PLANS:
 Map Vectorization:
 enabled: true
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
- inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat]
 notVectorizedReason: Select expression for SELECT operator: Vectorizing complex type MAP not enabled (map) since hive.vectorized.complex.types.enabled IS false
 vectorized: false
 Reduce Vectorization:
@@ -484,7 +484,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -561,6 +561,7 @@ STAGE PLANS:
 enabled: true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
+ inputFileFormats: []
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/vectorization_part_project.q.out ql/src/test/results/clientpositive/vectorization_part_project.q.out
index a0e1d91..1c8eb07 100644
--- ql/src/test/results/clientpositive/vectorization_part_project.q.out
+++ ql/src/test/results/clientpositive/vectorization_part_project.q.out
@@ -80,7 +80,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/vectorization_pushdown.q.out ql/src/test/results/clientpositive/vectorization_pushdown.q.out
index a13a0a0..db2d73f 100644
--- ql/src/test/results/clientpositive/vectorization_pushdown.q.out
+++ ql/src/test/results/clientpositive/vectorization_pushdown.q.out
@@ -41,7 +41,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/vectorized_bucketmapjoin1.q.out ql/src/test/results/clientpositive/vectorized_bucketmapjoin1.q.out
index c9a8fe3..6768a5e 100644
--- ql/src/test/results/clientpositive/vectorized_bucketmapjoin1.q.out
+++ ql/src/test/results/clientpositive/vectorized_bucketmapjoin1.q.out
@@ -156,7 +156,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -243,7 +243,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -330,7 +330,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/vectorized_case.q.out ql/src/test/results/clientpositive/vectorized_case.q.out
index 0489d72..9491cc6 100644
--- ql/src/test/results/clientpositive/vectorized_case.q.out
+++ ql/src/test/results/clientpositive/vectorized_case.q.out
@@ -83,7 +83,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -221,7 +221,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -297,7 +297,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -405,7 +405,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/vectorized_casts.q.out ql/src/test/results/clientpositive/vectorized_casts.q.out
index f6f2105..3861e63 100644
--- ql/src/test/results/clientpositive/vectorized_casts.q.out
+++ ql/src/test/results/clientpositive/vectorized_casts.q.out
@@ -198,9 +198,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: true
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 12
diff --git ql/src/test/results/clientpositive/vectorized_context.q.out ql/src/test/results/clientpositive/vectorized_context.q.out
index 539223b..7d0afaf 100644
--- ql/src/test/results/clientpositive/vectorized_context.q.out
+++ ql/src/test/results/clientpositive/vectorized_context.q.out
@@ -196,7 +196,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/vectorized_date_funcs.q.out ql/src/test/results/clientpositive/vectorized_date_funcs.q.out
index 298d92b..ab58fd7 100644
--- ql/src/test/results/clientpositive/vectorized_date_funcs.q.out
+++ ql/src/test/results/clientpositive/vectorized_date_funcs.q.out
@@ -286,7 +286,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -574,7 +574,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -866,7 +866,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -1136,7 +1136,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -1260,7 +1260,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -1303,7 +1303,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/vectorized_distinct_gby.q.out ql/src/test/results/clientpositive/vectorized_distinct_gby.q.out
index 7c4e0ed..e6773ae 100644
--- ql/src/test/results/clientpositive/vectorized_distinct_gby.q.out
+++ ql/src/test/results/clientpositive/vectorized_distinct_gby.q.out
@@ -78,9 +78,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 2
@@ -182,9 +183,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 12
diff --git ql/src/test/results/clientpositive/vectorized_mapjoin.q.out ql/src/test/results/clientpositive/vectorized_mapjoin.q.out
index a6fee45..65c6b19 100644
--- ql/src/test/results/clientpositive/vectorized_mapjoin.q.out
+++ ql/src/test/results/clientpositive/vectorized_mapjoin.q.out
@@ -113,7 +113,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/vectorized_mapjoin2.q.out ql/src/test/results/clientpositive/vectorized_mapjoin2.q.out
index e9a0e45..7388f6c 100644
--- ql/src/test/results/clientpositive/vectorized_mapjoin2.q.out
+++ ql/src/test/results/clientpositive/vectorized_mapjoin2.q.out
@@ -134,7 +134,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/vectorized_math_funcs.q.out ql/src/test/results/clientpositive/vectorized_math_funcs.q.out
index d5ba561..cbe59fb 100644
--- ql/src/test/results/clientpositive/vectorized_math_funcs.q.out
+++ ql/src/test/results/clientpositive/vectorized_math_funcs.q.out
@@ -151,7 +151,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: true
 vectorized: true
diff --git ql/src/test/results/clientpositive/vectorized_parquet_types.q.out ql/src/test/results/clientpositive/vectorized_parquet_types.q.out
index 0dc582f..5008c50 100644
--- ql/src/test/results/clientpositive/vectorized_parquet_types.q.out
+++ ql/src/test/results/clientpositive/vectorized_parquet_types.q.out
@@ -169,7 +169,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat]
 allNative: false
 usesVectorUDFAdaptor: true
 vectorized: true
@@ -260,7 +260,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -382,7 +382,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -426,7 +426,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -563,7 +563,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat]
 allNative: false
 usesVectorUDFAdaptor: true
 vectorized: true
@@ -654,7 +654,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -733,7 +733,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/vectorized_shufflejoin.q.out ql/src/test/results/clientpositive/vectorized_shufflejoin.q.out
index 6edb69a..8c4a7b1 100644
--- ql/src/test/results/clientpositive/vectorized_shufflejoin.q.out
+++ ql/src/test/results/clientpositive/vectorized_shufflejoin.q.out
@@ -103,7 +103,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -146,7 +146,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat
+ inputFileFormats: [org.apache.hadoop.mapred.SequenceFileInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/vectorized_string_funcs.q.out ql/src/test/results/clientpositive/vectorized_string_funcs.q.out
index 51f3d5b..b6d43a1 100644
--- ql/src/test/results/clientpositive/vectorized_string_funcs.q.out
+++ ql/src/test/results/clientpositive/vectorized_string_funcs.q.out
@@ -77,7 +77,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/vectorized_timestamp.q.out ql/src/test/results/clientpositive/vectorized_timestamp.q.out
index 6c49e03..72d0a67 100644
--- ql/src/test/results/clientpositive/vectorized_timestamp.q.out
+++ ql/src/test/results/clientpositive/vectorized_timestamp.q.out
@@ -155,9 +155,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 1
@@ -257,9 +258,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 1
@@ -342,9 +344,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 1
@@ -448,9 +451,10 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
+ vectorPartitionDescs: [(VECTORIZED_INPUT_FILE_FORMAT, org.apache.hadoop.hive.ql.io.orc.OrcInputFormat)]
 vectorized: true
 rowBatchContext:
   dataColumnCount: 1
diff --git ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out
index d864b57..996b857 100644
--- ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out
+++ ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out
@@ -131,7 +131,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -296,7 +296,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -461,7 +461,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -626,7 +626,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -752,7 +752,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -859,7 +859,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -978,7 +978,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: false
 vectorized: true
diff --git ql/src/test/results/clientpositive/vectorized_timestamp_ints_casts.q.out ql/src/test/results/clientpositive/vectorized_timestamp_ints_casts.q.out
index a203507..e944e2f 100644
--- ql/src/test/results/clientpositive/vectorized_timestamp_ints_casts.q.out
+++ ql/src/test/results/clientpositive/vectorized_timestamp_ints_casts.q.out
@@ -83,7 +83,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: true
 vectorized: true
@@ -243,7 +243,7 @@ STAGE PLANS:
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
 inputFormatFeatureSupport: []
 featureSupportInUse: []
- inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ inputFileFormats: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
 allNative: false
 usesVectorUDFAdaptor: true
 vectorized: true