diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/BaseVectorizedColumnReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/BaseVectorizedColumnReader.java index 907a9b8..46b6a0a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/BaseVectorizedColumnReader.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/BaseVectorizedColumnReader.java @@ -14,10 +14,10 @@ package org.apache.hadoop.hive.ql.io.parquet.vector; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.parquet.bytes.BytesInput; import org.apache.parquet.bytes.BytesUtils; import org.apache.parquet.column.ColumnDescriptor; -import org.apache.parquet.column.Dictionary; import org.apache.parquet.column.Encoding; import org.apache.parquet.column.page.DataPage; import org.apache.parquet.column.page.DataPageV1; @@ -62,7 +62,7 @@ /** * The dictionary, if this column has dictionary encoding. */ - protected final Dictionary dictionary; + protected final ParquetDataColumnReader dictionary; /** * If true, the current page is dictionary encoded. @@ -82,7 +82,7 @@ */ protected IntIterator repetitionLevelColumn; protected IntIterator definitionLevelColumn; - protected ValuesReader dataColumn; + protected ParquetDataColumnReader dataColumn; /** * Total values in the current page. @@ -92,22 +92,39 @@ protected final PageReader pageReader; protected final ColumnDescriptor descriptor; protected final Type type; + protected final TypeInfo hiveType; + + /** + * Used for VectorizedDummyColumnReader. + */ + public BaseVectorizedColumnReader(){ + this.pageReader = null; + this.descriptor = null; + this.type = null; + this.dictionary = null; + this.hiveType = null; + this.maxDefLevel = -1; + } public BaseVectorizedColumnReader( ColumnDescriptor descriptor, PageReader pageReader, boolean skipTimestampConversion, - Type type) throws IOException { + Type parquetType, TypeInfo hiveType) throws IOException { this.descriptor = descriptor; - this.type = type; + this.type = parquetType; this.pageReader = pageReader; this.maxDefLevel = descriptor.getMaxDefinitionLevel(); this.skipTimestampConversion = skipTimestampConversion; + this.hiveType = hiveType; DictionaryPage dictionaryPage = pageReader.readDictionaryPage(); if (dictionaryPage != null) { try { - this.dictionary = dictionaryPage.getEncoding().initDictionary(descriptor, dictionaryPage); + this.dictionary = ParquetDataColumnReaderFactory + .getDataColumnReaderByTypeOnDictionary(parquetType.asPrimitiveType(), + dictionaryPage.getEncoding().initDictionary(descriptor, dictionaryPage), + skipTimestampConversion); this.isCurrentPageDictionaryEncoded = true; } catch (IOException e) { throw new IOException("could not decode the dictionary for " + descriptor, e); @@ -146,7 +163,8 @@ public Void visit(DataPageV2 dataPageV2) { }); } - private void initDataReader(Encoding dataEncoding, byte[] bytes, int offset, int valueCount) throws IOException { + private void initDataReader(Encoding dataEncoding, byte[] bytes, int offset, int valueCount) + throws IOException { this.pageValueCount = valueCount; this.endOfPageValueCount = valuesRead + pageValueCount; if (dataEncoding.usesDictionary()) { @@ -156,10 +174,13 @@ private void initDataReader(Encoding dataEncoding, byte[] bytes, int offset, int "could not read page in col " + descriptor + " as the dictionary was missing for encoding " + dataEncoding); } - dataColumn = dataEncoding.getDictionaryBasedValuesReader(descriptor, VALUES, dictionary); + dataColumn = 
ParquetDataColumnReaderFactory.getDataColumnReaderByType(type.asPrimitiveType(), + dataEncoding.getDictionaryBasedValuesReader(descriptor, VALUES, dictionary + .getDictionary()), skipTimestampConversion); this.isCurrentPageDictionaryEncoded = true; } else { - dataColumn = dataEncoding.getValuesReader(descriptor, VALUES); + dataColumn = ParquetDataColumnReaderFactory.getDataColumnReaderByType(type.asPrimitiveType(), + dataEncoding.getValuesReader(descriptor, VALUES), skipTimestampConversion); this.isCurrentPageDictionaryEncoded = false; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReader.java new file mode 100644 index 0000000..bc1572e --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReader.java @@ -0,0 +1,68 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.io.parquet.vector; + +import org.apache.parquet.column.Dictionary; + +import java.io.IOException; +import java.sql.Timestamp; + +public interface ParquetDataColumnReader { + void initFromPage(int valueCount, byte[] page, int offset) throws IOException; + + int readValueDictionaryId(); + + long readLong(); + + int readInteger(); + + float readFloat(); + + boolean readBoolean(); + + byte[] readString(); + + byte[] readBytes(); + + byte[] readDecimal(); + + double readDouble(); + + Timestamp readTimestamp(); + + Dictionary getDictionary(); + + byte[] readBytes(int id); + + float readFloat(int id); + + double readDouble(int id); + + int readInteger(int id); + + long readLong(int id); + + boolean readBoolean(int id); + + byte[] readDecimal(int id); + + Timestamp readTimestamp(int id); + + byte[] readString(int id); +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReaderFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReaderFactory.java new file mode 100644 index 0000000..acdc5f1 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReaderFactory.java @@ -0,0 +1,568 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.io.parquet.vector;
+
+import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTime;
+import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.parquet.column.Dictionary;
+import org.apache.parquet.column.values.ValuesReader;
+import org.apache.parquet.io.api.Binary;
+import org.apache.parquet.schema.OriginalType;
+import org.apache.parquet.schema.PrimitiveType;
+
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.sql.Timestamp;
+
+/**
+ * A Parquet file has a self-describing schema which may differ from the schema requested by the
+ * user (e.g. after schema evolution). This factory is used to retrieve the typed data requested
+ * by the user via a corresponding reader which converts from the underlying data.
+ */
+public final class ParquetDataColumnReaderFactory {
+
+  private ParquetDataColumnReaderFactory() {
+  }
+
+  /**
+   * The default data column reader wrapping the existing Parquet page reader. It works for both
+   * dictionary and non-dictionary encoded types and mirrors the dictionary encoding path.
+   */
+  public static class DefaultParquetDataColumnReader implements ParquetDataColumnReader {
+    protected ValuesReader valuesReader;
+    protected Dictionary dict;
+
+    public DefaultParquetDataColumnReader(ValuesReader valuesReader) {
+      this.valuesReader = valuesReader;
+    }
+
+    public DefaultParquetDataColumnReader(Dictionary dict) {
+      this.dict = dict;
+    }
+
+    public void initFromPage(int i, ByteBuffer byteBuffer, int i1) throws IOException {
+      valuesReader.initFromPage(i, byteBuffer, i1);
+    }
+
+    public void initFromPage(int valueCount, byte[] page, int offset) throws IOException {
+      this.initFromPage(valueCount, ByteBuffer.wrap(page), offset);
+    }
+
+    /**
+     * @return the next boolean from the page
+     */
+    public boolean readBoolean() {
+      return valuesReader.readBoolean();
+    }
+
+    public boolean readBoolean(int id) {
+      return dict.decodeToBoolean(id);
+    }
+
+    @Override
+    public byte[] readString(int id) {
+      return dict.decodeToBinary(id).getBytesUnsafe();
+    }
+
+    @Override
+    public byte[] readString() {
+      return valuesReader.readBytes().getBytesUnsafe();
+    }
+
+    /**
+     * @return the next Binary from the page
+     */
+    public byte[] readBytes() {
+      return valuesReader.readBytes().getBytesUnsafe();
+    }
+
+    public byte[] readBytes(int id) {
+      return dict.decodeToBinary(id).getBytesUnsafe();
+    }
+
+    @Override
+    public byte[] readDecimal() {
+      return valuesReader.readBytes().getBytesUnsafe();
+    }
+
+    @Override
+    public byte[] readDecimal(int id) {
+      return dict.decodeToBinary(id).getBytesUnsafe();
+    }
+
+    /**
+     * @return the next float from the page
+     */
+    public float readFloat() {
+      return valuesReader.readFloat();
+    }
+
+    public float readFloat(int id) {
+      return dict.decodeToFloat(id);
+    }
+
+    /**
+     * @return the next double from the page
+     */
+    public double readDouble() {
+      return valuesReader.readDouble();
+    }
+
+    public double readDouble(int id) {
+      return dict.decodeToDouble(id);
+    }
+
+    @Override
+    public Timestamp readTimestamp() {
+      throw new RuntimeException("Unsupported operation");
+    }
+
+    @Override
+    public Timestamp readTimestamp(int id) {
+      throw new RuntimeException("Unsupported operation");
+    }
+
+    /**
+     * @return the next integer from the page
+     */
+    public int readInteger() {
+      return valuesReader.readInteger();
+    }
+
+    public int readInteger(int id) {
+      return dict.decodeToInt(id);
+    }
+
+    public long readLong(int id) {
+      return dict.decodeToLong(id);
+    }
+
+    /**
+     * @return the next long from the page
+     */
+    public long readLong() {
+      return valuesReader.readLong();
+    }
+
+    public int readValueDictionaryId() {
+      return valuesReader.readValueDictionaryId();
+    }
+
+    public void skip() {
+      valuesReader.skip();
+    }
+
+    @Override
+    public Dictionary getDictionary() {
+      return dict;
+    }
+  }
+
+  /**
+   * The reader which reads from the underlying int32 values. The implementation is consistent
+   * with ETypeConverter's EINT32_CONVERTER.
+   */
+  public static class TypesFromInt32PageReader extends DefaultParquetDataColumnReader {
+
+    public TypesFromInt32PageReader(ValuesReader realReader) {
+      super(realReader);
+    }
+
+    public TypesFromInt32PageReader(Dictionary dict) {
+      super(dict);
+    }
+
+    @Override
+    public long readLong() {
+      return valuesReader.readInteger();
+    }
+
+    @Override
+    public long readLong(int id) {
+      return dict.decodeToInt(id);
+    }
+
+    @Override
+    public float readFloat() {
+      return valuesReader.readInteger();
+    }
+
+    @Override
+    public float readFloat(int id) {
+      return dict.decodeToInt(id);
+    }
+
+    @Override
+    public double readDouble() {
+      return valuesReader.readInteger();
+    }
+
+    public double readDouble(int id) {
+      return dict.decodeToInt(id);
+    }
+
+    private byte[] convertToString(int value) {
+      try {
+        // convert integer to string
+        return Integer.toString(value).getBytes("UTF-8");
+      } catch (UnsupportedEncodingException e) {
+        throw new RuntimeException("Failed to encode string in UTF-8", e);
+      }
+    }
+
+    @Override
+    public byte[] readString() {
+      return convertToString(valuesReader.readInteger());
+    }
+
+    @Override
+    public byte[] readString(int id) {
+      return convertToString(dict.decodeToInt(id));
+    }
+  }
+
+  /**
+   * The reader which reads from the underlying int64 values. The implementation is consistent
+   * with ETypeConverter's EINT64_CONVERTER.
+   */
+  public static class TypesFromInt64PageReader extends DefaultParquetDataColumnReader {
+
+    public TypesFromInt64PageReader(ValuesReader realReader) {
+      super(realReader);
+    }
+
+    public TypesFromInt64PageReader(Dictionary dictionary) {
+      super(dictionary);
+    }
+
+    @Override
+    public float readFloat() {
+      return valuesReader.readLong();
+    }
+
+    @Override
+    public float readFloat(int id) {
+      return dict.decodeToLong(id);
+    }
+
+    @Override
+    public double readDouble() {
+      return valuesReader.readLong();
+    }
+
+    public double readDouble(int id) {
+      return dict.decodeToLong(id);
+    }
+
+    private byte[] convertToByte(long value) {
+      try {
+        // convert long to string
+        return Long.toString(value).getBytes("UTF-8");
+      } catch (UnsupportedEncodingException e) {
+        throw new RuntimeException("Failed to encode string in UTF-8", e);
+      }
+    }
+
+    @Override
+    public byte[] readString(int id) {
+      return convertToByte(dict.decodeToLong(id));
+    }
+
+    @Override
+    public byte[] readString() {
+      return convertToByte(valuesReader.readLong());
+    }
+  }
+
+  /**
+   * The reader which reads from the underlying float values. The implementation is consistent
+   * with ETypeConverter's EFLOAT_CONVERTER.
+   */
+  public static class TypesFromFloatPageReader extends DefaultParquetDataColumnReader {
+
+    public TypesFromFloatPageReader(ValuesReader realReader) {
+      super(realReader);
+    }
+
+    public TypesFromFloatPageReader(Dictionary realReader) {
+      super(realReader);
+    }
+
+    @Override
+    public double readDouble() {
+      return valuesReader.readFloat();
+    }
+
+    @Override
+    public double readDouble(int id) {
+      return dict.decodeToFloat(id);
+    }
+
+    private byte[] convert(float value) {
+      try {
+        // convert float to string
+        return Float.toString(value).getBytes("UTF-8");
+      } catch (UnsupportedEncodingException e) {
+        throw new RuntimeException("Failed to encode string in UTF-8", e);
+      }
+    }
+
+    @Override
+    public byte[] readString() {
+      return convert(valuesReader.readFloat());
+    }
+
+    @Override
+    public byte[] readString(int id) {
+      return convert(dict.decodeToFloat(id));
+    }
+  }
+
+  public static class TypesFromDoublePageReader extends DefaultParquetDataColumnReader {
+
+    public TypesFromDoublePageReader(ValuesReader realReader) {
+      super(realReader);
+    }
+
+    public TypesFromDoublePageReader(Dictionary realReader) {
+      super(realReader);
+    }
+
+    private byte[] convert(double value) {
+      try {
+        // convert double to string
+        return Double.toString(value).getBytes("UTF-8");
+      } catch (UnsupportedEncodingException e) {
+        throw new RuntimeException("Failed to encode string in UTF-8", e);
+      }
+    }
+
+    @Override
+    public byte[] readString() {
+      return convert(valuesReader.readDouble());
+    }
+
+    @Override
+    public byte[] readString(int id) {
+      return convert(dict.decodeToDouble(id));
+    }
+  }
+
+  /**
+   * The reader which reads from the underlying boolean values.
+   */
+  public static class TypesFromBooleanPageReader extends DefaultParquetDataColumnReader {
+
+    public TypesFromBooleanPageReader(ValuesReader valuesReader) {
+      super(valuesReader);
+    }
+
+    public TypesFromBooleanPageReader(Dictionary dict) {
+      super(dict);
+    }
+
+    @Override
+    public byte[] readString(int id) {
+      final boolean value = dict.decodeToBoolean(id);
+
+      try {
+        // convert boolean to string
+        return Boolean.toString(value).getBytes("UTF-8");
+      } catch (UnsupportedEncodingException e) {
+        throw new RuntimeException("Failed to encode string in UTF-8", e);
+      }
+    }
+
+    @Override
+    public byte[] readString() {
+      final boolean value = valuesReader.readBoolean();
+
+      try {
+        // convert boolean to string
+        return Boolean.toString(value).getBytes("UTF-8");
+      } catch (UnsupportedEncodingException e) {
+        throw new RuntimeException("Failed to encode string in UTF-8", e);
+      }
+    }
+  }
+
+  public static class TypesFromInt96PageReader extends DefaultParquetDataColumnReader {
+    private boolean skipTimestampConversion = false;
+
+    public TypesFromInt96PageReader(ValuesReader realReader, boolean skipTimestampConversion) {
+      super(realReader);
+      this.skipTimestampConversion = skipTimestampConversion;
+    }
+
+    public TypesFromInt96PageReader(Dictionary dict, boolean skipTimestampConversion) {
+      super(dict);
+      this.skipTimestampConversion = skipTimestampConversion;
+    }
+
+    private Timestamp convert(Binary binary) {
+      ByteBuffer buf = binary.toByteBuffer();
+      buf.order(ByteOrder.LITTLE_ENDIAN);
+      long timeOfDayNanos = buf.getLong();
+      int julianDay = buf.getInt();
+      NanoTime nt = new NanoTime(julianDay, timeOfDayNanos);
+      return NanoTimeUtils.getTimestamp(nt, skipTimestampConversion);
+    }
+
+    @Override
+    public Timestamp readTimestamp(int id) {
+      return convert(dict.decodeToBinary(id));
+    }
+
+    @Override
+    public Timestamp readTimestamp() {
+      return convert(valuesReader.readBytes());
+    }
+
+    @Override
+    public byte[] readString() {
+      // TODO use TimestampWritable to do the conversion?
+      try {
+        return readTimestamp().toString().getBytes("UTF-8");
+      } catch (UnsupportedEncodingException e) {
+        throw new RuntimeException("Failed to encode string in UTF-8", e);
+      }
+    }
+
+    @Override
+    public byte[] readString(int id) {
+      // TODO use TimestampWritable to do the conversion?
+      try {
+        return readTimestamp(id).toString().getBytes("UTF-8");
+      } catch (UnsupportedEncodingException e) {
+        throw new RuntimeException("Failed to encode string in UTF-8", e);
+      }
+    }
+  }
+
+  public static class TypesFromDecimalPageReader extends DefaultParquetDataColumnReader {
+    private HiveDecimalWritable tempDecimal = new HiveDecimalWritable();
+    private short scale;
+
+    public TypesFromDecimalPageReader(ValuesReader realReader, short scale) {
+      super(realReader);
+      this.scale = scale;
+    }
+
+    public TypesFromDecimalPageReader(Dictionary dict, short scale) {
+      super(dict);
+      this.scale = scale;
+    }
+
+    private byte[] convertToString(Binary value) {
+      tempDecimal.set(value.getBytesUnsafe(), scale);
+
+      try {
+        // convert decimal to string
+        return tempDecimal.toString().getBytes("UTF-8");
+      } catch (UnsupportedEncodingException e) {
+        throw new RuntimeException("Failed to encode string in UTF-8", e);
+      }
+    }
+
+    @Override
+    public byte[] readString(int id) {
+      return convertToString(dict.decodeToBinary(id));
+    }
+
+    @Override
+    public byte[] readString() {
+      return convertToString(valuesReader.readBytes());
+    }
+  }
+
+  private static ParquetDataColumnReader getDataColumnReaderByTypeHelper(boolean isDictionary,
+      PrimitiveType parquetType,
+      Dictionary dictionary,
+      ValuesReader valuesReader,
+      boolean skipTimestampConversion)
+      throws IOException {
+    switch (parquetType.getPrimitiveTypeName()) {
+    case INT32:
+      return isDictionary ? new TypesFromInt32PageReader(dictionary) : new
+          TypesFromInt32PageReader(valuesReader);
+    case INT64:
+      return isDictionary ? new TypesFromInt64PageReader(dictionary) : new
+          TypesFromInt64PageReader(valuesReader);
+    case FLOAT:
+      return isDictionary ? new TypesFromFloatPageReader(dictionary) : new
+          TypesFromFloatPageReader(valuesReader);
+    case INT96:
+      return isDictionary ? new TypesFromInt96PageReader(dictionary, skipTimestampConversion) : new
+          TypesFromInt96PageReader(valuesReader, skipTimestampConversion);
+    case BOOLEAN:
+      return isDictionary ? new TypesFromBooleanPageReader(dictionary) : new
+          TypesFromBooleanPageReader(valuesReader);
+    case BINARY:
+    case FIXED_LEN_BYTE_ARRAY:
+      return getConvertorFromBinary(isDictionary, parquetType, valuesReader, dictionary);
+    case DOUBLE:
+      return isDictionary ? new TypesFromDoublePageReader(dictionary) : new
+          TypesFromDoublePageReader(valuesReader);
+    default:
+      return isDictionary ? new DefaultParquetDataColumnReader(dictionary) : new
+          DefaultParquetDataColumnReader(valuesReader);
+    }
+  }
+
+  private static ParquetDataColumnReader getConvertorFromBinary(boolean isDict,
+      PrimitiveType parquetType,
+      ValuesReader valuesReader,
+      Dictionary dictionary) {
+    OriginalType originalType = parquetType.getOriginalType();
+
+    if (originalType == null) {
+      return isDict ? new DefaultParquetDataColumnReader(dictionary) : new
+          DefaultParquetDataColumnReader(valuesReader);
+    }
+    switch (originalType) {
+    case DECIMAL:
+      final short scale = (short) parquetType.asPrimitiveType().getDecimalMetadata().getScale();
+      return isDict ?
new TypesFromDecimalPageReader(dictionary, scale) : new + TypesFromDecimalPageReader(valuesReader, scale); + default: + return isDict ? new DefaultParquetDataColumnReader(dictionary) : new + DefaultParquetDataColumnReader(valuesReader); + } + } + + public static ParquetDataColumnReader getDataColumnReaderByTypeOnDictionary( + PrimitiveType parquetType, + Dictionary realReader, boolean skipTimestampConversion) + throws IOException { + return getDataColumnReaderByTypeHelper(true, parquetType, realReader, null, + skipTimestampConversion); + } + + public static ParquetDataColumnReader getDataColumnReaderByType(PrimitiveType parquetType, + ValuesReader realReader, + boolean skipTimestampConversion) + throws IOException { + return getDataColumnReaderByTypeHelper(false, parquetType, null, realReader, + skipTimestampConversion); + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedDummyColumnReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedDummyColumnReader.java new file mode 100644 index 0000000..ee1d692 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedDummyColumnReader.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.io.parquet.vector; + +import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; + +import java.io.IOException; +import java.util.Arrays; + +/** + * A dummy vectorized parquet reader to support schema evolution. 
+ */ +public class VectorizedDummyColumnReader extends BaseVectorizedColumnReader { + + public VectorizedDummyColumnReader() { + super(); + } + + @Override + public void readBatch(int total, ColumnVector column, TypeInfo columnType) throws IOException { + Arrays.fill(column.isNull, true); + column.isRepeating = true; + column.noNulls = false; + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedListColumnReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedListColumnReader.java index c36640d..931602e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedListColumnReader.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedListColumnReader.java @@ -46,8 +46,8 @@ boolean isFirstRow = true; public VectorizedListColumnReader(ColumnDescriptor descriptor, PageReader pageReader, - boolean skipTimestampConversion, Type type) throws IOException { - super(descriptor, pageReader, skipTimestampConversion, type); + boolean skipTimestampConversion, Type type, TypeInfo hiveType) throws IOException { + super(descriptor, pageReader, skipTimestampConversion, type, hiveType); } @Override @@ -147,24 +147,22 @@ private Object readPrimitiveTypedRow(PrimitiveObjectInspector.PrimitiveCategory case INT: case BYTE: case SHORT: - return dataColumn.readInteger(); + case LONG: + return dataColumn.readLong(); case DATE: case INTERVAL_YEAR_MONTH: - case LONG: - return dataColumn.readLong(); case BOOLEAN: - return dataColumn.readBoolean() ? 1 : 0; - case DOUBLE: - return dataColumn.readDouble(); + return dataColumn.readBoolean(); case BINARY: case STRING: case CHAR: case VARCHAR: - return dataColumn.readBytes().getBytesUnsafe(); + return dataColumn.readBytes(); case FLOAT: - return dataColumn.readFloat(); + case DOUBLE: + return dataColumn.readDouble(); case DECIMAL: - return dataColumn.readBytes().getBytesUnsafe(); + return dataColumn.readBytes(); case INTERVAL_DAY_TIME: case TIMESTAMP: default: @@ -176,36 +174,37 @@ private List decodeDictionaryIds(List valueList) { int total = valueList.size(); List resultList; List intList = (List) valueList; + //FIXME: need to be updated to support schema evolution switch (descriptor.getType()) { case INT32: resultList = new ArrayList(total); for (int i = 0; i < total; ++i) { - resultList.add(dictionary.decodeToInt(intList.get(i))); + resultList.add(dictionary.readInteger(intList.get(i))); } break; case INT64: resultList = new ArrayList(total); for (int i = 0; i < total; ++i) { - resultList.add(dictionary.decodeToLong(intList.get(i))); + resultList.add(dictionary.readLong(intList.get(i))); } break; case FLOAT: resultList = new ArrayList(total); for (int i = 0; i < total; ++i) { - resultList.add(dictionary.decodeToFloat(intList.get(i))); + resultList.add(dictionary.readFloat(intList.get(i))); } break; case DOUBLE: resultList = new ArrayList(total); for (int i = 0; i < total; ++i) { - resultList.add(dictionary.decodeToDouble(intList.get(i))); + resultList.add(dictionary.readDouble(intList.get(i))); } break; case BINARY: case FIXED_LEN_BYTE_ARRAY: resultList = new ArrayList(total); for (int i = 0; i < total; ++i) { - resultList.add(dictionary.decodeToBinary(intList.get(i)).getBytesUnsafe()); + resultList.add(dictionary.readBytes(intList.get(i))); } break; default: diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedParquetRecordReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedParquetRecordReader.java index 
08ac57b..651fcd5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedParquetRecordReader.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedParquetRecordReader.java @@ -1,9 +1,13 @@ /* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -11,9 +15,8 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.hive.ql.io.parquet.vector; -import com.google.common.annotations.VisibleForTesting; +package org.apache.hadoop.hive.ql.io.parquet.vector; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; @@ -474,9 +477,13 @@ private VectorizedColumnReader buildVectorizedParquetReader( if (columnDescriptors == null || columnDescriptors.isEmpty()) { throw new RuntimeException( "Failed to find related Parquet column descriptor with type " + type); - } else { + } + if (fileSchema.getColumns().contains(descriptors.get(0))) { return new VectorizedPrimitiveColumnReader(descriptors.get(0), - pages.getPageReader(descriptors.get(0)), skipTimestampConversion, type); + pages.getPageReader(descriptors.get(0)), skipTimestampConversion, type, typeInfo); + } else { + // Support for schema evolution + return new VectorizedDummyColumnReader(); } case STRUCT: StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo; @@ -503,7 +510,7 @@ private VectorizedColumnReader buildVectorizedParquetReader( "Failed to find related Parquet column descriptor with type " + type); } return new VectorizedListColumnReader(descriptors.get(0), - pages.getPageReader(descriptors.get(0)), skipTimestampConversion, type); + pages.getPageReader(descriptors.get(0)), skipTimestampConversion, type, typeInfo); case MAP: if (columnDescriptors == null || columnDescriptors.isEmpty()) { throw new RuntimeException( @@ -535,10 +542,10 @@ private VectorizedColumnReader buildVectorizedParquetReader( List kvTypes = groupType.getFields(); VectorizedListColumnReader keyListColumnReader = new VectorizedListColumnReader( descriptors.get(0), pages.getPageReader(descriptors.get(0)), skipTimestampConversion, - kvTypes.get(0)); + kvTypes.get(0), typeInfo); VectorizedListColumnReader valueListColumnReader = new VectorizedListColumnReader( descriptors.get(1), pages.getPageReader(descriptors.get(1)), skipTimestampConversion, - kvTypes.get(1)); + kvTypes.get(1), typeInfo); return new VectorizedMapColumnReader(keyListColumnReader, valueListColumnReader); case UNION: default: diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java index 39689f1..8a58d18 100644 --- 
a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java @@ -19,17 +19,12 @@ import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; -import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTime; -import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils; import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.parquet.column.ColumnDescriptor; import org.apache.parquet.column.page.PageReader; import org.apache.parquet.schema.Type; import java.io.IOException; -import java.nio.ByteBuffer; -import java.nio.ByteOrder; -import java.sql.Timestamp; /** * It's column level Parquet reader which is used to read a batch of records for a column, @@ -41,8 +36,8 @@ public VectorizedPrimitiveColumnReader( ColumnDescriptor descriptor, PageReader pageReader, boolean skipTimestampConversion, - Type type) throws IOException { - super(descriptor, pageReader, skipTimestampConversion, type); + Type type, TypeInfo hiveType) throws IOException { + super(descriptor, pageReader, skipTimestampConversion, type, hiveType); } @Override @@ -64,7 +59,7 @@ public void readBatch( LongColumnVector dictionaryIds = new LongColumnVector(); // Read and decode dictionary ids. readDictionaryIDs(num, dictionaryIds, rowId); - decodeDictionaryIds(rowId, num, column, dictionaryIds); + decodeDictionaryIds(rowId, num, column, columnType, dictionaryIds); } else { // assign values in vector readBatchHelper(num, column, columnType, rowId); @@ -99,10 +94,12 @@ private void readBatchHelper( readDoubles(num, (DoubleColumnVector) column, rowId); break; case BINARY: + readBinaries(num, (BytesColumnVector) column, rowId); + break; case STRING: case CHAR: case VARCHAR: - readBinaries(num, (BytesColumnVector) column, rowId); + readString(num, (BytesColumnVector) column, rowId); break; case FLOAT: readFloats(num, (DoubleColumnVector) column, rowId); @@ -255,7 +252,7 @@ private void readDecimal( while (left > 0) { readRepetitionAndDefinitionLevels(); if (definitionLevel >= maxDefLevel) { - c.vector[rowId].set(dataColumn.readBytes().getBytesUnsafe(), c.scale); + c.vector[rowId].set(dataColumn.readDecimal(), c.scale); c.isNull[rowId] = false; c.isRepeating = c.isRepeating && (c.vector[0] == c.vector[rowId]); } else { @@ -268,6 +265,28 @@ private void readDecimal( } } + private void readString( + int total, + BytesColumnVector c, + int rowId) throws IOException { + int left = total; + while (left > 0) { + readRepetitionAndDefinitionLevels(); + if (definitionLevel >= maxDefLevel) { + c.setVal(rowId, dataColumn.readString()); + c.isNull[rowId] = false; + // TODO figure out a better way to set repeat for Binary type + c.isRepeating = false; + } else { + c.isNull[rowId] = true; + c.isRepeating = false; + c.noNulls = false; + } + rowId++; + left--; + } + } + private void readBinaries( int total, BytesColumnVector c, @@ -276,7 +295,7 @@ private void readBinaries( while (left > 0) { readRepetitionAndDefinitionLevels(); if (definitionLevel >= maxDefLevel) { - c.setVal(rowId, dataColumn.readBytes().getBytesUnsafe()); + c.setVal(rowId, dataColumn.readBytes()); c.isNull[rowId] = false; // TODO figure out a better way to set repeat for Binary type c.isRepeating = false; @@ -298,9 +317,7 @@ private 
void readTimestamp(int total, TimestampColumnVector c, int rowId) throws switch (descriptor.getType()) { //INT64 is not yet supported case INT96: - NanoTime nt = NanoTime.fromBinary(dataColumn.readBytes()); - Timestamp ts = NanoTimeUtils.getTimestamp(nt, skipTimestampConversion); - c.set(rowId, ts); + c.set(rowId, dataColumn.readTimestamp()); break; default: throw new IOException( @@ -326,6 +343,7 @@ private void decodeDictionaryIds( int rowId, int num, ColumnVector column, + TypeInfo columnType, LongColumnVector dictionaryIds) { System.arraycopy(dictionaryIds.isNull, rowId, column.isNull, rowId, num); if (column.noNulls) { @@ -333,63 +351,78 @@ private void decodeDictionaryIds( } column.isRepeating = column.isRepeating && dictionaryIds.isRepeating; - switch (descriptor.getType()) { - case INT32: + + PrimitiveTypeInfo primitiveColumnType = (PrimitiveTypeInfo) columnType; + + switch (primitiveColumnType.getPrimitiveCategory()) { + case INT: + case BYTE: + case SHORT: for (int i = rowId; i < rowId + num; ++i) { ((LongColumnVector) column).vector[i] = - dictionary.decodeToInt((int) dictionaryIds.vector[i]); + dictionary.readInteger((int) dictionaryIds.vector[i]); } break; - case INT64: + case DATE: + case INTERVAL_YEAR_MONTH: + case LONG: for (int i = rowId; i < rowId + num; ++i) { ((LongColumnVector) column).vector[i] = - dictionary.decodeToLong((int) dictionaryIds.vector[i]); + dictionary.readLong((int) dictionaryIds.vector[i]); } break; - case FLOAT: + case BOOLEAN: for (int i = rowId; i < rowId + num; ++i) { - ((DoubleColumnVector) column).vector[i] = - dictionary.decodeToFloat((int) dictionaryIds.vector[i]); + ((LongColumnVector) column).vector[i] = + dictionary.readBoolean((int) dictionaryIds.vector[i]) ? 1 : 0; } break; case DOUBLE: for (int i = rowId; i < rowId + num; ++i) { ((DoubleColumnVector) column).vector[i] = - dictionary.decodeToDouble((int) dictionaryIds.vector[i]); + dictionary.readDouble((int) dictionaryIds.vector[i]); } break; - case INT96: + case BINARY: for (int i = rowId; i < rowId + num; ++i) { - ByteBuffer buf = dictionary.decodeToBinary((int) dictionaryIds.vector[i]).toByteBuffer(); - buf.order(ByteOrder.LITTLE_ENDIAN); - long timeOfDayNanos = buf.getLong(); - int julianDay = buf.getInt(); - NanoTime nt = new NanoTime(julianDay, timeOfDayNanos); - Timestamp ts = NanoTimeUtils.getTimestamp(nt, skipTimestampConversion); - ((TimestampColumnVector) column).set(i, ts); + ((BytesColumnVector) column) + .setVal(i, dictionary.readBytes((int) dictionaryIds.vector[i])); } break; - case BINARY: - case FIXED_LEN_BYTE_ARRAY: - if (column instanceof BytesColumnVector) { - for (int i = rowId; i < rowId + num; ++i) { - ((BytesColumnVector) column) - .setVal(i, dictionary.decodeToBinary((int) dictionaryIds.vector[i]).getBytesUnsafe()); - } - } else { - DecimalColumnVector decimalColumnVector = ((DecimalColumnVector) column); - decimalColumnVector.precision = + case STRING: + case CHAR: + case VARCHAR: + for (int i = rowId; i < rowId + num; ++i) { + ((BytesColumnVector) column) + .setVal(i, dictionary.readString((int) dictionaryIds.vector[i])); + } + break; + case FLOAT: + for (int i = rowId; i < rowId + num; ++i) { + ((DoubleColumnVector) column).vector[i] = + dictionary.readFloat((int) dictionaryIds.vector[i]); + } + break; + case DECIMAL: + DecimalColumnVector decimalColumnVector = ((DecimalColumnVector) column); + decimalColumnVector.precision = (short) type.asPrimitiveType().getDecimalMetadata().getPrecision(); - decimalColumnVector.scale = (short) 
type.asPrimitiveType().getDecimalMetadata().getScale(); - for (int i = rowId; i < rowId + num; ++i) { - decimalColumnVector.vector[i] - .set(dictionary.decodeToBinary((int) dictionaryIds.vector[i]).getBytesUnsafe(), - decimalColumnVector.scale); - } + decimalColumnVector.scale = (short) type.asPrimitiveType().getDecimalMetadata().getScale(); + for (int i = rowId; i < rowId + num; ++i) { + decimalColumnVector.vector[i] + .set(dictionary.readDecimal((int) dictionaryIds.vector[i]), + decimalColumnVector.scale); } break; + case TIMESTAMP: + for (int i = rowId; i < rowId + num; ++i) { + ((TimestampColumnVector) column) + .set(i, dictionary.readTimestamp((int) dictionaryIds.vector[i])); + } + break; + case INTERVAL_DAY_TIME: default: - throw new UnsupportedOperationException("Unsupported type: " + descriptor.getType()); + throw new UnsupportedOperationException("Unsupported type: " + type); } } } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestVectorizedColumnReader.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestVectorizedColumnReader.java index 9e414dc..a6f4c18 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestVectorizedColumnReader.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestVectorizedColumnReader.java @@ -22,6 +22,7 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.io.IOConstants; import org.apache.hadoop.hive.ql.io.parquet.vector.VectorizedParquetRecordReader; +import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.ColumnProjectionUtils; import org.apache.hadoop.mapred.FileSplit; import org.apache.hadoop.mapred.JobConf; @@ -55,26 +56,40 @@ public static void cleanup() throws IOException { @Test public void testIntRead() throws Exception { intRead(isDictionaryEncoding); + longReadInt(isDictionaryEncoding); + floatReadInt(isDictionaryEncoding); + doubleReadInt(isDictionaryEncoding); } @Test public void testLongRead() throws Exception { longRead(isDictionaryEncoding); + floatReadLong(isDictionaryEncoding); + doubleReadLong(isDictionaryEncoding); + } + + @Test + public void testTimestamp() throws Exception { + timestampRead(isDictionaryEncoding); + stringReadTimestamp(isDictionaryEncoding); } @Test public void testDoubleRead() throws Exception { doubleRead(isDictionaryEncoding); + stringReadDouble(isDictionaryEncoding); } @Test public void testFloatRead() throws Exception { floatRead(isDictionaryEncoding); + doubleReadFloat(isDictionaryEncoding); } @Test public void testBooleanRead() throws Exception { booleanRead(); + stringReadBoolean(); } @Test @@ -101,6 +116,7 @@ public void structReadSomeNull() throws Exception { @Test public void decimalRead() throws Exception { decimalRead(isDictionaryEncoding); + stringReadDecimal(isDictionaryEncoding); } private class TestVectorizedParquetRecordReader extends VectorizedParquetRecordReader { diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestVectorizedDictionaryEncodingColumnReader.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestVectorizedDictionaryEncodingColumnReader.java index 3e5d831..32d27d9 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestVectorizedDictionaryEncodingColumnReader.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestVectorizedDictionaryEncodingColumnReader.java @@ -41,21 +41,34 @@ public static void cleanup() throws IOException { @Test public void testIntRead() throws Exception { intRead(isDictionaryEncoding); + 
longReadInt(isDictionaryEncoding); + floatReadInt(isDictionaryEncoding); + doubleReadInt(isDictionaryEncoding); } @Test public void testLongRead() throws Exception { longRead(isDictionaryEncoding); + floatReadLong(isDictionaryEncoding); + doubleReadLong(isDictionaryEncoding); + } + + @Test + public void testTimestamp() throws Exception { + timestampRead(isDictionaryEncoding); + stringReadTimestamp(isDictionaryEncoding); } @Test public void testDoubleRead() throws Exception { doubleRead(isDictionaryEncoding); + stringReadDouble(isDictionaryEncoding); } @Test public void testFloatRead() throws Exception { floatRead(isDictionaryEncoding); + doubleReadFloat(isDictionaryEncoding); } @Test @@ -81,5 +94,6 @@ public void structReadSomeNull() throws Exception { @Test public void decimalRead() throws Exception { decimalRead(isDictionaryEncoding); + stringReadDecimal(isDictionaryEncoding); } } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java index 5d3ebd6..87b24e3 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java @@ -21,17 +21,12 @@ import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.Utilities; -import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; -import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; -import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; -import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; -import org.apache.hadoop.hive.ql.exec.vector.StructColumnVector; -import org.apache.hadoop.hive.ql.exec.vector.ListColumnVector; -import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; -import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx; +import org.apache.hadoop.hive.ql.exec.vector.*; import org.apache.hadoop.hive.ql.io.IOConstants; import org.apache.hadoop.hive.ql.io.parquet.read.DataWritableReadSupport; import org.apache.hadoop.hive.ql.io.parquet.serde.ArrayWritableObjectInspector; +import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTime; +import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils; import org.apache.hadoop.hive.ql.io.parquet.vector.VectorizedParquetRecordReader; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.MapWork; @@ -43,8 +38,8 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.mapred.FileSplit; -import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapred.JobConf; +import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.Job; import org.apache.parquet.example.data.Group; import org.apache.parquet.example.data.simple.SimpleGroupFactory; @@ -54,9 +49,10 @@ import org.apache.parquet.hadoop.example.GroupWriteSupport; import org.apache.parquet.io.api.Binary; import org.apache.parquet.schema.MessageType; + import java.io.IOException; -import java.math.BigDecimal; -import java.math.BigInteger; +import java.sql.Timestamp; +import java.util.Arrays; import java.util.List; import static junit.framework.Assert.assertTrue; @@ -77,84 +73,84 @@ protected final static Path file = new Path("target/test/TestParquetVectorReader/testParquetFile"); protected static final MessageType 
schema = parseMessageType( - "message hive_schema { " - + "required int32 int32_field; " - + "required int64 int64_field; " - + "required int96 int96_field; " - + "required double double_field; " - + "required float float_field; " - + "required boolean boolean_field; " - + "required fixed_len_byte_array(3) flba_field; " - + "optional fixed_len_byte_array(1) some_null_field; " - + "optional fixed_len_byte_array(1) all_null_field; " - + "required binary binary_field; " - + "optional binary binary_field_some_null; " - + "required binary value (DECIMAL(5,2)); " - + "required group struct_field {" - + " required int32 a;\n" - + " required double b;\n" - + "}\n" - + "optional group nested_struct_field {" - + " optional group nsf {" - + " optional int32 c;\n" - + " optional int32 d;\n" - + " }\n" - + " optional double e;\n" - + "}\n" - + "optional group struct_field_some_null {" - + " optional int32 f;\n" - + " optional double g;\n" - + "}\n" - + "optional group map_field (MAP) {\n" - + " repeated group map (MAP_KEY_VALUE) {\n" - + " required binary key;\n" - + " optional binary value;\n" - + " }\n" - + "}\n" - + "optional group array_list (LIST) {\n" - + " repeated group bag {\n" - + " optional int32 array_element;\n" - + " }\n" - + "}\n" - + "repeated int32 list_int32_field;" - + "repeated int64 list_int64_field;" - + "repeated double list_double_field;" - + "repeated float list_float_field;" - + "repeated boolean list_boolean_field;" - + "repeated fixed_len_byte_array(3) list_byte_array_field;" - + "repeated binary list_binary_field;" - + "repeated binary list_decimal_field (DECIMAL(5,2));" - + "repeated binary list_binary_field_for_repeat_test;" - + "repeated int32 list_int32_field_for_repeat_test;" - + "repeated group map_int32 (MAP_KEY_VALUE) {\n" - + " required int32 key;\n" - + " optional int32 value;\n" - + "}\n" - + "repeated group map_int64 (MAP_KEY_VALUE) {\n" - + " required int64 key;\n" - + " optional int64 value;\n" - + "}\n" - + "repeated group map_double (MAP_KEY_VALUE) {\n" - + " required double key;\n" - + " optional double value;\n" - + "}\n" - + "repeated group map_float (MAP_KEY_VALUE) {\n" - + " required float key;\n" - + " optional float value;\n" - + "}\n" - + "repeated group map_binary (MAP_KEY_VALUE) {\n" - + " required binary key;\n" - + " optional binary value;\n" - + "}\n" - + "repeated group map_decimal (MAP_KEY_VALUE) {\n" - + " required binary key (DECIMAL(5,2));\n" - + " optional binary value (DECIMAL(5,2));\n" - + "}\n" - + "repeated group map_int32_for_repeat_test (MAP_KEY_VALUE) {\n" - + " required int32 key;\n" - + " optional int32 value;\n" - + "}\n" - + "} "); + "message hive_schema { " + + "required int32 int32_field; " + + "required int64 int64_field; " + + "required int96 int96_field; " + + "required double double_field; " + + "required float float_field; " + + "required boolean boolean_field; " + + "required fixed_len_byte_array(3) flba_field; " + + "optional fixed_len_byte_array(1) some_null_field; " + + "optional fixed_len_byte_array(1) all_null_field; " + + "required binary binary_field; " + + "optional binary binary_field_some_null; " + + "required binary value (DECIMAL(5,2)); " + + "required group struct_field {" + + " required int32 a;\n" + + " required double b;\n" + + "}\n" + + "optional group nested_struct_field {" + + " optional group nsf {" + + " optional int32 c;\n" + + " optional int32 d;\n" + + " }\n" + + " optional double e;\n" + + "}\n" + + "optional group struct_field_some_null {" + + " optional int32 f;\n" + + " optional double g;\n" + 
+ "}\n" + + "optional group map_field (MAP) {\n" + + " repeated group map (MAP_KEY_VALUE) {\n" + + " required binary key;\n" + + " optional binary value;\n" + + " }\n" + + "}\n" + + "optional group array_list (LIST) {\n" + + " repeated group bag {\n" + + " optional int32 array_element;\n" + + " }\n" + + "}\n" + + "repeated int32 list_int32_field;" + + "repeated int64 list_int64_field;" + + "repeated double list_double_field;" + + "repeated float list_float_field;" + + "repeated boolean list_boolean_field;" + + "repeated fixed_len_byte_array(3) list_byte_array_field;" + + "repeated binary list_binary_field;" + + "repeated binary list_decimal_field (DECIMAL(5,2));" + + "repeated binary list_binary_field_for_repeat_test;" + + "repeated int32 list_int32_field_for_repeat_test;" + + "repeated group map_int32 (MAP_KEY_VALUE) {\n" + + " required int32 key;\n" + + " optional int32 value;\n" + + "}\n" + + "repeated group map_int64 (MAP_KEY_VALUE) {\n" + + " required int64 key;\n" + + " optional int64 value;\n" + + "}\n" + + "repeated group map_double (MAP_KEY_VALUE) {\n" + + " required double key;\n" + + " optional double value;\n" + + "}\n" + + "repeated group map_float (MAP_KEY_VALUE) {\n" + + " required float key;\n" + + " optional float value;\n" + + "}\n" + + "repeated group map_binary (MAP_KEY_VALUE) {\n" + + " required binary key;\n" + + " optional binary value;\n" + + "}\n" + + "repeated group map_decimal (MAP_KEY_VALUE) {\n" + + " required binary key (DECIMAL(5,2));\n" + + " optional binary value (DECIMAL(5,2));\n" + + "}\n" + + "repeated group map_int32_for_repeat_test (MAP_KEY_VALUE) {\n" + + " required int32 key;\n" + + " optional int32 value;\n" + + "}\n" + + "} "); protected static void removeFile() throws IOException { FileSystem fs = file.getFileSystem(conf); @@ -166,73 +162,66 @@ protected static void removeFile() throws IOException { protected static ParquetWriter initWriterFromFile() throws IOException { GroupWriteSupport.setSchema(schema, conf); return new ParquetWriter<>( - file, - new GroupWriteSupport(), - GZIP, 1024 * 1024, 1024, 1024 * 1024, - true, false, PARQUET_1_0, conf); + file, + new GroupWriteSupport(), + GZIP, 1024 * 1024, 1024, 1024 * 1024, + true, false, PARQUET_1_0, conf); } protected static int getIntValue( - boolean isDictionaryEncoding, - int index) { + boolean isDictionaryEncoding, + int index) { return isDictionaryEncoding ? index % UNIQUE_NUM : index; } protected static double getDoubleValue( - boolean isDictionaryEncoding, - int index) { + boolean isDictionaryEncoding, + int index) { return isDictionaryEncoding ? index % UNIQUE_NUM : index; } protected static long getLongValue( - boolean isDictionaryEncoding, - int index) { + boolean isDictionaryEncoding, + int index) { return isDictionaryEncoding ? (long) 2 * index % UNIQUE_NUM : (long) 2 * index; } protected static float getFloatValue( - boolean isDictionaryEncoding, - int index) { + boolean isDictionaryEncoding, + int index) { return (float) (isDictionaryEncoding ? 
index % UNIQUE_NUM * 2.0 : index * 2.0); } protected static boolean getBooleanValue( - float index) { + float index) { return (index % 2 == 0); } - protected static String getTimestampStr(int index) { - String s = String.valueOf(index); - int l = 4 - s.length(); - for (int i = 0; i < l; i++) { - s = "0" + s; - } - return "99999999" + s; + protected static NanoTime getNanoTime(int index) { + return NanoTimeUtils.getNanoTime(new Timestamp(index), false); } protected static HiveDecimal getDecimal( - boolean isDictionaryEncoding, - int index) { + boolean isDictionaryEncoding, + int index) { int decimalVal = index % 100; String decimalStr = (decimalVal < 10) ? "0" + String.valueOf(decimalVal) : String.valueOf - (decimalVal); + (decimalVal); int intVal = (isDictionaryEncoding) ? index % UNIQUE_NUM : index / 100; - String d = String.valueOf(intVal) + decimalStr; - BigInteger bi = new BigInteger(d); - BigDecimal bd = new BigDecimal(bi); - return HiveDecimal.create(bd); + String d = String.valueOf(intVal) + "." + decimalStr; + return HiveDecimal.create(d); } protected static Binary getTimestamp( - boolean isDictionaryEncoding, - int index) { - String s = isDictionaryEncoding ? getTimestampStr(index % UNIQUE_NUM) : getTimestampStr(index); - return Binary.fromReusedByteArray(s.getBytes()); + boolean isDictionaryEncoding, + int index) { + NanoTime s = isDictionaryEncoding ? getNanoTime(index % UNIQUE_NUM) : getNanoTime(index); + return s.toBinary(); } protected static String getStr( - boolean isDictionaryEncoding, - int index) { + boolean isDictionaryEncoding, + int index) { int binaryLen = isDictionaryEncoding ? index % UNIQUE_NUM : index; String v = ""; while (binaryLen > 0) { @@ -244,8 +233,8 @@ protected static String getStr( } protected static Binary getBinaryValue( - boolean isDictionaryEncoding, - int index) { + boolean isDictionaryEncoding, + int index) { return Binary.fromString(getStr(isDictionaryEncoding, index)); } @@ -254,20 +243,20 @@ protected static boolean isNull(int index) { } public static VectorizedParquetRecordReader createTestParquetReader(String schemaString, Configuration conf) - throws IOException, InterruptedException, HiveException { + throws IOException, InterruptedException, HiveException { conf.set(PARQUET_READ_SCHEMA, schemaString); HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED, true); HiveConf.setVar(conf, HiveConf.ConfVars.PLAN, "//tmp"); Job vectorJob = new Job(conf, "read vector"); ParquetInputFormat.setInputPaths(vectorJob, file); initialVectorizedRowBatchCtx(conf); - return new VectorizedParquetRecordReader(getFileSplit(vectorJob),new JobConf(conf)); + return new VectorizedParquetRecordReader(getFileSplit(vectorJob), new JobConf(conf)); } protected static FileSplit getFileSplit(Job vectorJob) throws IOException, InterruptedException { ParquetInputFormat parquetInputFormat = new ParquetInputFormat(GroupReadSupport.class); InputSplit split = (InputSplit) parquetInputFormat.getSplits(vectorJob).get(0); - FileSplit fsplit = new FileSplit(file,0L,split.getLength(),split.getLocations()); + FileSplit fsplit = new FileSplit(file, 0L, split.getLength(), split.getLocations()); return fsplit; } @@ -284,13 +273,13 @@ protected static void writeData(ParquetWriter writer, boolean isDictionar boolean booleanVal = getBooleanValue(i); Binary binary = getBinaryValue(isDictionaryEncoding, i); Group group = f.newGroup() - .append("int32_field", intVal) - .append("int64_field", longVal) - .append("int96_field", timeStamp) - .append("double_field", doubleVal) - 
.append("float_field", floatVal) - .append("boolean_field", booleanVal) - .append("flba_field", "abc"); + .append("int32_field", intVal) + .append("int64_field", longVal) + .append("int96_field", timeStamp) + .append("double_field", doubleVal) + .append("float_field", floatVal) + .append("boolean_field", booleanVal) + .append("flba_field", "abc"); if (!isNull) { group.append("some_null_field", "x"); @@ -306,8 +295,8 @@ protected static void writeData(ParquetWriter writer, boolean isDictionar group.append("value", Binary.fromConstantByteArray(w.getInternalStorage())); group.addGroup("struct_field") - .append("a", intVal) - .append("b", doubleVal); + .append("a", intVal) + .append("b", doubleVal); Group g = group.addGroup("nested_struct_field"); @@ -358,14 +347,154 @@ private static StructObjectInspector createStructObjectInspector(Configuration c return new ArrayWritableObjectInspector((StructTypeInfo) rowTypeInfo); } - protected void intRead(boolean isDictionaryEncoding) throws InterruptedException, HiveException, IOException { - Configuration conf = new Configuration(); - conf.set(IOConstants.COLUMNS,"int32_field"); - conf.set(IOConstants.COLUMNS_TYPES,"int"); + protected void timestampRead(boolean isDictionaryEncoding) throws InterruptedException, + HiveException, IOException { + conf.set(IOConstants.COLUMNS, "int96_field"); + conf.set(IOConstants.COLUMNS_TYPES, "timestamp"); + conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false); + conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0"); + VectorizedParquetRecordReader reader = createTestParquetReader("message test { required " + + "int96 int96_field;}", conf); + VectorizedRowBatch previous = reader.createValue(); + try { + int c = 0; + while (reader.next(NullWritable.get(), previous)) { + TimestampColumnVector vector = (TimestampColumnVector) previous.cols[0]; + assertTrue(vector.noNulls); + for (int i = 0; i < vector.nanos.length; i++) { + if (c == nElements) { + break; + } + Timestamp expected = isDictionaryEncoding ? new Timestamp(c % UNIQUE_NUM) : new Timestamp(c); + assertEquals("Not the same time at " + c, expected.getTime(), vector.getTime(i)); + assertEquals("Not the same nano at " + c, expected.getNanos(), vector.getNanos(i)); + assertFalse(vector.isNull[i]); + c++; + } + } + assertEquals(nElements, c); + } finally { + reader.close(); + } + } + + protected void stringReadTimestamp(boolean isDictionaryEncoding) throws InterruptedException, + HiveException, IOException { + conf.set(IOConstants.COLUMNS, "int96_field"); + conf.set(IOConstants.COLUMNS_TYPES, "string"); + conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false); + conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0"); + VectorizedParquetRecordReader reader = createTestParquetReader("message test { required " + + "int96 int96_field;}", conf); + VectorizedRowBatch previous = reader.createValue(); + try { + int c = 0; + while (reader.next(NullWritable.get(), previous)) { + BytesColumnVector vector = (BytesColumnVector) previous.cols[0]; + assertTrue(vector.noNulls); + for (int i = 0; i < vector.vector.length; i++) { + if (c == nElements) { + break; + } + + Timestamp expected = isDictionaryEncoding ? 
new Timestamp(c % UNIQUE_NUM) : new Timestamp( + c); + String actual = new String(Arrays + .copyOfRange(vector.vector[i], vector.start[i], vector.start[i] + vector.length[i])); + assertEquals("Not the same time at " + c, expected.toString(), actual); + + assertFalse(vector.isNull[i]); + c++; + } + } + assertEquals(nElements, c); + } finally { + reader.close(); + } + } + + protected void floatReadInt(boolean isDictionaryEncoding) throws InterruptedException, + HiveException, IOException { + conf.set(IOConstants.COLUMNS, "int32_field"); + conf.set(IOConstants.COLUMNS_TYPES, "float"); + conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false); + conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0"); + VectorizedParquetRecordReader reader = createTestParquetReader("message test { required int32" + + " int32_field;}", conf); + VectorizedRowBatch previous = reader.createValue(); + try { + int c = 0; + while (reader.next(NullWritable.get(), previous)) { + DoubleColumnVector vector = (DoubleColumnVector) previous.cols[0]; + assertTrue(vector.noNulls); + for (int i = 0; i < vector.vector.length; i++) { + if (c == nElements) { + break; + } + assertEquals("Failed at " + c, getIntValue(isDictionaryEncoding, c), vector.vector[i], 0); + assertFalse(vector.isNull[i]); + c++; + } + } + assertEquals(nElements, c); + } finally { + reader.close(); + } + } + + protected void doubleReadInt(boolean isDictionaryEncoding) throws InterruptedException, + HiveException, IOException { + conf.set(IOConstants.COLUMNS, "int32_field"); + conf.set(IOConstants.COLUMNS_TYPES, "double"); conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false); conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0"); VectorizedParquetRecordReader reader = - createTestParquetReader("message test { required int32 int32_field;}", conf); + createTestParquetReader("message test { required int32 int32_field;}", conf); + VectorizedRowBatch previous = reader.createValue(); + try { + int c = 0; + while (reader.next(NullWritable.get(), previous)) { + DoubleColumnVector vector = (DoubleColumnVector) previous.cols[0]; + assertTrue(vector.noNulls); + for (int i = 0; i < vector.vector.length; i++) { + if (c == nElements) { + break; + } + assertEquals("Failed at " + c, getIntValue(isDictionaryEncoding, c), vector.vector[i], 0); + assertFalse(vector.isNull[i]); + c++; + } + } + assertEquals(nElements, c); + } finally { + reader.close(); + } + } + + protected void longReadInt(boolean isDictionaryEncoding) throws InterruptedException, + HiveException, IOException { + Configuration c = new Configuration(); + c.set(IOConstants.COLUMNS, "int32_field"); + c.set(IOConstants.COLUMNS_TYPES, "bigint"); + c.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false); + c.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0"); + intRead(isDictionaryEncoding, c); + } + + protected void intRead(boolean isDictionaryEncoding) throws InterruptedException, + HiveException, IOException { + Configuration c = new Configuration(); + c.set(IOConstants.COLUMNS, "int32_field"); + c.set(IOConstants.COLUMNS_TYPES, "int"); + c.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false); + c.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0"); + intRead(isDictionaryEncoding, c); + } + + private void intRead(boolean isDictionaryEncoding, Configuration conf) throws + InterruptedException, HiveException, IOException { + VectorizedParquetRecordReader reader = + createTestParquetReader("message test { required int32 int32_field;}", conf); VectorizedRowBatch 
previous = reader.createValue(); try { int c = 0; @@ -373,7 +502,7 @@ protected void intRead(boolean isDictionaryEncoding) throws InterruptedException LongColumnVector vector = (LongColumnVector) previous.cols[0]; assertTrue(vector.noNulls); for (int i = 0; i < vector.vector.length; i++) { - if(c == nElements){ + if (c == nElements) { break; } assertEquals("Failed at " + c, getIntValue(isDictionaryEncoding, c), vector.vector[i]); @@ -387,14 +516,78 @@ protected void intRead(boolean isDictionaryEncoding) throws InterruptedException } } + protected void floatReadLong(boolean isDictionaryEncoding) throws Exception { + Configuration c = new Configuration(); + c.set(IOConstants.COLUMNS, "int64_field"); + c.set(IOConstants.COLUMNS_TYPES, "float"); + c.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false); + c.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0"); + VectorizedParquetRecordReader reader = + createTestParquetReader("message test { required int64 int64_field;}", c); + VectorizedRowBatch previous = reader.createValue(); + try { + int count = 0; + while (reader.next(NullWritable.get(), previous)) { + DoubleColumnVector vector = (DoubleColumnVector) previous.cols[0]; + assertTrue(vector.noNulls); + for (int i = 0; i < vector.vector.length; i++) { + if (count == nElements) { + break; + } + assertEquals("Failed at " + count, getLongValue(isDictionaryEncoding, count), vector + .vector[i], 0); + assertFalse(vector.isNull[i]); + count++; + } + } + assertEquals(nElements, count); + } finally { + reader.close(); + } + } + + protected void doubleReadLong(boolean isDictionaryEncoding) throws Exception { + Configuration c = new Configuration(); + c.set(IOConstants.COLUMNS, "int64_field"); + c.set(IOConstants.COLUMNS_TYPES, "double"); + c.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false); + c.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0"); + VectorizedParquetRecordReader reader = + createTestParquetReader("message test { required int64 int64_field;}", c); + VectorizedRowBatch previous = reader.createValue(); + try { + int count = 0; + while (reader.next(NullWritable.get(), previous)) { + DoubleColumnVector vector = (DoubleColumnVector) previous.cols[0]; + assertTrue(vector.noNulls); + for (int i = 0; i < vector.vector.length; i++) { + if (count == nElements) { + break; + } + assertEquals("Failed at " + count, getLongValue(isDictionaryEncoding, count), + vector.vector[i], 0); + assertFalse(vector.isNull[i]); + count++; + } + } + assertEquals(nElements, count); + } finally { + reader.close(); + } + } + protected void longRead(boolean isDictionaryEncoding) throws Exception { - Configuration conf = new Configuration(); - conf.set(IOConstants.COLUMNS, "int64_field"); - conf.set(IOConstants.COLUMNS_TYPES, "bigint"); - conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false); - conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0"); + Configuration c = new Configuration(); + c.set(IOConstants.COLUMNS, "int64_field"); + c.set(IOConstants.COLUMNS_TYPES, "bigint"); + c.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false); + c.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0"); + longRead(isDictionaryEncoding, c); + } + + private void longRead(boolean isDictionaryEncoding, Configuration conf) throws Exception { VectorizedParquetRecordReader reader = - createTestParquetReader("message test { required int64 int64_field;}", conf); + createTestParquetReader("message test { required int64 int64_field;}", conf); VectorizedRowBatch previous = reader.createValue(); 
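+      // read every batch produced by the vectorized reader and verify all nElements decoded values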
try { int c = 0; @@ -417,13 +610,49 @@ protected void longRead(boolean isDictionaryEncoding) throws Exception { } protected void doubleRead(boolean isDictionaryEncoding) throws Exception { - Configuration conf = new Configuration(); - conf.set(IOConstants.COLUMNS, "double_field"); - conf.set(IOConstants.COLUMNS_TYPES, "double"); - conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false); - conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0"); + Configuration c = new Configuration(); + c.set(IOConstants.COLUMNS, "double_field"); + c.set(IOConstants.COLUMNS_TYPES, "double"); + c.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false); + c.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0"); + doubleRead(isDictionaryEncoding, c); + } + + protected void stringReadDouble(boolean isDictionaryEncoding) throws Exception { + Configuration reader_conf = new Configuration(); + reader_conf.set(IOConstants.COLUMNS, "double_field"); + reader_conf.set(IOConstants.COLUMNS_TYPES, "string"); + reader_conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false); + reader_conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0"); VectorizedParquetRecordReader reader = - createTestParquetReader("message test { required double double_field;}", conf); + createTestParquetReader("message test { required double double_field;}", reader_conf); + VectorizedRowBatch previous = reader.createValue(); + try { + int c = 0; + while (reader.next(NullWritable.get(), previous)) { + BytesColumnVector vector = (BytesColumnVector) previous.cols[0]; + assertTrue(vector.noNulls); + for (int i = 0; i < vector.vector.length; i++) { + if (c == nElements) { + break; + } + String actual = new String(Arrays.copyOfRange(vector.vector[i], vector.start[i], vector + .start[i] + vector.length[i])); + assertEquals("Failed at " + c, String.valueOf(getDoubleValue(isDictionaryEncoding, c)), + actual); + assertFalse(vector.isNull[i]); + c++; + } + } + assertEquals(nElements, c); + } finally { + reader.close(); + } + } + + private void doubleRead(boolean isDictionaryEncoding, Configuration conf) throws Exception { + VectorizedParquetRecordReader reader = + createTestParquetReader("message test { required double double_field;}", conf); VectorizedRowBatch previous = reader.createValue(); try { int c = 0; @@ -435,7 +664,7 @@ protected void doubleRead(boolean isDictionaryEncoding) throws Exception { break; } assertEquals("Failed at " + c, getDoubleValue(isDictionaryEncoding, c), vector.vector[i], - 0); + 0); assertFalse(vector.isNull[i]); c++; } @@ -447,13 +676,26 @@ protected void doubleRead(boolean isDictionaryEncoding) throws Exception { } protected void floatRead(boolean isDictionaryEncoding) throws Exception { - Configuration conf = new Configuration(); - conf.set(IOConstants.COLUMNS, "float_field"); - conf.set(IOConstants.COLUMNS_TYPES, "float"); - conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false); - conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0"); + Configuration c = new Configuration(); + c.set(IOConstants.COLUMNS, "float_field"); + c.set(IOConstants.COLUMNS_TYPES, "float"); + c.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false); + c.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0"); + floatRead(isDictionaryEncoding, c); + } + + protected void doubleReadFloat(boolean isDictionaryEncoding) throws Exception { + Configuration c = new Configuration(); + c.set(IOConstants.COLUMNS, "float_field"); + c.set(IOConstants.COLUMNS_TYPES, "double"); + 
c.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false); + c.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0"); + floatRead(isDictionaryEncoding, c); + } + + private void floatRead(boolean isDictionaryEncoding, Configuration conf) throws Exception { VectorizedParquetRecordReader reader = - createTestParquetReader("message test { required float float_field;}", conf); + createTestParquetReader("message test { required float float_field;}", conf); VectorizedRowBatch previous = reader.createValue(); try { int c = 0; @@ -465,7 +707,7 @@ protected void floatRead(boolean isDictionaryEncoding) throws Exception { break; } assertEquals("Failed at " + c, getFloatValue(isDictionaryEncoding, c), vector.vector[i], - 0); + 0); assertFalse(vector.isNull[i]); c++; } @@ -483,7 +725,7 @@ protected void booleanRead() throws Exception { conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false); conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0"); VectorizedParquetRecordReader reader = - createTestParquetReader("message test { required boolean boolean_field;}", conf); + createTestParquetReader("message test { required boolean boolean_field;}", conf); VectorizedRowBatch previous = reader.createValue(); try { int c = 0; @@ -505,6 +747,38 @@ protected void booleanRead() throws Exception { } } + protected void stringReadBoolean() throws Exception { + Configuration conf = new Configuration(); + conf.set(IOConstants.COLUMNS, "boolean_field"); + conf.set(IOConstants.COLUMNS_TYPES, "string"); + conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false); + conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0"); + VectorizedParquetRecordReader reader = + createTestParquetReader("message test { required boolean boolean_field;}", conf); + VectorizedRowBatch previous = reader.createValue(); + try { + int c = 0; + while (reader.next(NullWritable.get(), previous)) { + BytesColumnVector vector = (BytesColumnVector) previous.cols[0]; + assertTrue(vector.noNulls); + for (int i = 0; i < vector.vector.length; i++) { + if (c == nElements) { + break; + } + + String actual = new String(Arrays.copyOfRange(vector.vector[i], vector.start[i], vector + .start[i] + vector.length[i])); + assertEquals("Failed at " + c, String.valueOf(getBooleanValue(c)), actual); + assertFalse(vector.isNull[i]); + c++; + } + } + assertEquals(nElements, c); + } finally { + reader.close(); + } + } + protected void binaryRead(boolean isDictionaryEncoding) throws Exception { Configuration conf = new Configuration(); conf.set(IOConstants.COLUMNS, "binary_field_some_null"); @@ -512,7 +786,7 @@ protected void binaryRead(boolean isDictionaryEncoding) throws Exception { conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false); conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0"); VectorizedParquetRecordReader reader = - createTestParquetReader("message test { required binary binary_field_some_null;}", conf); + createTestParquetReader("message test { required binary binary_field_some_null;}", conf); VectorizedRowBatch previous = reader.createValue(); int c = 0; try { @@ -527,7 +801,7 @@ protected void binaryRead(boolean isDictionaryEncoding) throws Exception { assertEquals("Null assert failed at " + c, isNull(c), vector.isNull[i]); if (!vector.isNull[i]) { actual = new String(ArrayUtils - .subarray(vector.vector[i], vector.start[i], vector.start[i] + vector.length[i])); + .subarray(vector.vector[i], vector.start[i], vector.start[i] + vector.length[i])); assertEquals("failed at " + c, getStr(isDictionaryEncoding, c), 
actual); } else { noNull = false; @@ -550,11 +824,11 @@ protected void structRead(boolean isDictionaryEncoding) throws Exception { conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false); conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0"); String schema = "message hive_schema {\n" - + "group struct_field {\n" - + " optional int32 a;\n" - + " optional double b;\n" - + "}\n" - + "}\n"; + + "group struct_field {\n" + + " optional int32 a;\n" + + " optional double b;\n" + + "}\n" + + "}\n"; VectorizedParquetRecordReader reader = createTestParquetReader(schema, conf); VectorizedRowBatch previous = reader.createValue(); int c = 0; @@ -588,13 +862,13 @@ protected void nestedStructRead0(boolean isDictionaryEncoding) throws Exception conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false); conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0"); String schema = "message hive_schema {\n" - + "group nested_struct_field {\n" - + " optional group nsf {\n" - + " optional int32 c;\n" - + " optional int32 d;\n" - + " }" - + "optional double e;\n" - + "}\n"; + + "group nested_struct_field {\n" + + " optional group nsf {\n" + + " optional int32 c;\n" + + " optional int32 d;\n" + + " }" + + "optional double e;\n" + + "}\n"; VectorizedParquetRecordReader reader = createTestParquetReader(schema, conf); VectorizedRowBatch previous = reader.createValue(); int c = 0; @@ -631,11 +905,11 @@ protected void nestedStructRead1(boolean isDictionaryEncoding) throws Exception conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false); conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0"); String schema = "message hive_schema {\n" - + "group nested_struct_field {\n" - + " optional group nsf {\n" - + " optional int32 c;\n" - + " }" - + "}\n"; + + "group nested_struct_field {\n" + + " optional group nsf {\n" + + " optional int32 c;\n" + + " }" + + "}\n"; VectorizedParquetRecordReader reader = createTestParquetReader(schema, conf); VectorizedRowBatch previous = reader.createValue(); int c = 0; @@ -668,10 +942,10 @@ protected void structReadSomeNull(boolean isDictionaryEncoding) throws Exception conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false); conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0"); String schema = "message hive_schema {\n" - + "group struct_field_some_null {\n" - + " optional int32 f;\n" - + " optional double g;\n" - + "}\n"; + + "group struct_field_some_null {\n" + + " optional int32 f;\n" + + " optional double g;\n" + + "}\n"; VectorizedParquetRecordReader reader = createTestParquetReader(schema, conf); VectorizedRowBatch previous = reader.createValue(); int c = 0; @@ -706,6 +980,40 @@ protected void structReadSomeNull(boolean isDictionaryEncoding) throws Exception } } + protected void stringReadDecimal(boolean isDictionaryEncoding) throws Exception { + Configuration conf = new Configuration(); + conf.set(IOConstants.COLUMNS, "value"); + conf.set(IOConstants.COLUMNS_TYPES, "string"); + conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false); + conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0"); + VectorizedParquetRecordReader reader = + createTestParquetReader("message hive_schema { required value (DECIMAL(5,2));}", conf); + VectorizedRowBatch previous = reader.createValue(); + try { + int c = 0; + while (reader.next(NullWritable.get(), previous)) { + BytesColumnVector vector = (BytesColumnVector) previous.cols[0]; + assertTrue(vector.noNulls); + for (int i = 0; i < vector.vector.length; i++) { + if (c == nElements) { + break; 
+ } + + String actual = new String(Arrays.copyOfRange(vector.vector[i], vector.start[i], vector + .start[i] + vector.length[i])); + assertEquals("Check failed at pos " + c, getDecimal(isDictionaryEncoding, c).toString(), + actual); + + assertFalse(vector.isNull[i]); + c++; + } + } + assertEquals(nElements, c); + } finally { + reader.close(); + } + } + protected void decimalRead(boolean isDictionaryEncoding) throws Exception { Configuration conf = new Configuration(); conf.set(IOConstants.COLUMNS, "value"); @@ -713,7 +1021,7 @@ protected void decimalRead(boolean isDictionaryEncoding) throws Exception { conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false); conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0"); VectorizedParquetRecordReader reader = - createTestParquetReader("message hive_schema { required value (DECIMAL(5,2));}", conf); + createTestParquetReader("message hive_schema { required value (DECIMAL(5,2));}", conf); VectorizedRowBatch previous = reader.createValue(); try { int c = 0; @@ -725,7 +1033,8 @@ protected void decimalRead(boolean isDictionaryEncoding) throws Exception { break; } assertEquals("Check failed at pos " + c, getDecimal(isDictionaryEncoding, c), - vector.vector[i].getHiveDecimal()); + vector.vector[i].getHiveDecimal()); + assertFalse(vector.isNull[i]); c++; } diff --git a/ql/src/test/queries/clientpositive/schema_evol_par_vec_table_dictionary_encoding.q b/ql/src/test/queries/clientpositive/schema_evol_par_vec_table_dictionary_encoding.q new file mode 100644 index 0000000..6b706ab --- /dev/null +++ b/ql/src/test/queries/clientpositive/schema_evol_par_vec_table_dictionary_encoding.q @@ -0,0 +1,94 @@ +set hive.fetch.task.conversion=none; +set hive.vectorized.execution.enabled=true; +set parquet.enable.dictionary=true; + +drop table test_alter; +drop table test_alter2; +drop table test_alter3; + +create table test_alter (id string) stored as parquet; +insert into test_alter values ('1'), ('2'), ('3'); +select * from test_alter; + +-- add new column -> empty col values should return NULL +alter table test_alter add columns (newCol string); +select * from test_alter; + +-- insert data into new column -> New data should be returned +insert into test_alter values ('4', '100'); +select * from test_alter; + +-- remove the newly added column +-- this works in vectorized execution +alter table test_alter replace columns (id string); +select * from test_alter; + +-- add column using replace column syntax +alter table test_alter replace columns (id string, id2 string); +-- this surprisingly doesn't return the 100 added to 4th row above +select * from test_alter; +insert into test_alter values ('5', '100'); +select * from test_alter; + +-- use the same column name and datatype +alter table test_alter replace columns (id string, id2 string); +select * from test_alter; + +-- change string to char +alter table test_alter replace columns (id char(10), id2 string); +select * from test_alter; + +-- change string to varchar +alter table test_alter replace columns (id string, id2 string); +alter table test_alter replace columns (id varchar(10), id2 string); +select * from test_alter; + +-- change columntype and column name +alter table test_alter replace columns (id string, id2 string); +alter table test_alter replace columns (idv varchar(10), id2 string); +select * from test_alter; + +-- test int to long type conversion +create table test_alter2 (id int) stored as parquet; +insert into test_alter2 values (1); +alter table test_alter2 replace columns (id bigint); 
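+-- note: the column data was written as int32; after the replace, the vectorized reader is
+-- expected to up-convert the stored int values to bigint on read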
+select * from test_alter2; + +-- test float to double type conversion +drop table test_alter2; +create table test_alter2 (id float) stored as parquet; +insert into test_alter2 values (1.5); +alter table test_alter2 replace columns (id double); +select * from test_alter2; + +drop table test_alter2; +create table test_alter2 (ts timestamp) stored as parquet; +insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456'); +select * from test_alter2; +alter table test_alter2 replace columns (ts string); +select * from test_alter2; + +drop table test_alter2; +create table test_alter2 (ts timestamp) stored as parquet; +insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456'); +select * from test_alter2; +alter table test_alter2 replace columns (ts varchar(19)); +-- this should truncate the microseconds +select * from test_alter2; + +drop table test_alter2; +create table test_alter2 (ts timestamp) stored as parquet; +insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456'); +select * from test_alter2; +alter table test_alter2 replace columns (ts char(25)); +select * from test_alter2; + +-- test integer types upconversion +create table test_alter3 (id1 tinyint, id2 smallint, id3 int, id4 bigint) stored as parquet; +insert into test_alter3 values (10, 20, 30, 40); +alter table test_alter3 replace columns (id1 smallint, id2 int, id3 bigint, id4 decimal(10,4)); +-- this fails mostly due to bigint to decimal +-- select * from test_alter3; +select id1, id2, id3 from test_alter3; + + diff --git a/ql/src/test/queries/clientpositive/schema_evol_par_vec_table_non_dictionary_encoding.q b/ql/src/test/queries/clientpositive/schema_evol_par_vec_table_non_dictionary_encoding.q new file mode 100644 index 0000000..3006bd4 --- /dev/null +++ b/ql/src/test/queries/clientpositive/schema_evol_par_vec_table_non_dictionary_encoding.q @@ -0,0 +1,94 @@ +set hive.fetch.task.conversion=none; +set hive.vectorized.execution.enabled=true; +set parquet.enable.dictionary=false; + +drop table test_alter; +drop table test_alter2; +drop table test_alter3; + +create table test_alter (id string) stored as parquet; +insert into test_alter values ('1'), ('2'), ('3'); +select * from test_alter; + +-- add new column -> empty col values should return NULL +alter table test_alter add columns (newCol string); +select * from test_alter; + +-- insert data into new column -> New data should be returned +insert into test_alter values ('4', '100'); +select * from test_alter; + +-- remove the newly added column +-- this works in vectorized execution +alter table test_alter replace columns (id string); +select * from test_alter; + +-- add column using replace column syntax +alter table test_alter replace columns (id string, id2 string); +-- this surprisingly doesn't return the 100 added to 4th row above +select * from test_alter; +insert into test_alter values ('5', '100'); +select * from test_alter; + +-- use the same column name and datatype +alter table test_alter replace columns (id string, id2 string); +select * from test_alter; + +-- change string to char +alter table test_alter replace columns (id char(10), id2 string); +select * from test_alter; + +-- change string to varchar +alter table test_alter replace columns (id string, id2 string); +alter table test_alter replace columns (id varchar(10), id2 string); +select * from test_alter; + +-- 
change columntype and column name +alter table test_alter replace columns (id string, id2 string); +alter table test_alter replace columns (idv varchar(10), id2 string); +select * from test_alter; + +-- test int to long type conversion +create table test_alter2 (id int) stored as parquet; +insert into test_alter2 values (1); +alter table test_alter2 replace columns (id bigint); +select * from test_alter2; + +-- test float to double type conversion +drop table test_alter2; +create table test_alter2 (id float) stored as parquet; +insert into test_alter2 values (1.5); +alter table test_alter2 replace columns (id double); +select * from test_alter2; + +drop table test_alter2; +create table test_alter2 (ts timestamp) stored as parquet; +insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456'); +select * from test_alter2; +alter table test_alter2 replace columns (ts string); +select * from test_alter2; + +drop table test_alter2; +create table test_alter2 (ts timestamp) stored as parquet; +insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456'); +select * from test_alter2; +alter table test_alter2 replace columns (ts varchar(19)); +-- this should truncate the microseconds +select * from test_alter2; + +drop table test_alter2; +create table test_alter2 (ts timestamp) stored as parquet; +insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456'); +select * from test_alter2; +alter table test_alter2 replace columns (ts char(25)); +select * from test_alter2; + +-- test integer types upconversion +create table test_alter3 (id1 tinyint, id2 smallint, id3 int, id4 bigint) stored as parquet; +insert into test_alter3 values (10, 20, 30, 40); +alter table test_alter3 replace columns (id1 smallint, id2 int, id3 bigint, id4 decimal(10,4)); +-- this fails mostly due to bigint to decimal +-- select * from test_alter3; +select id1, id2, id3 from test_alter3; + + diff --git a/ql/src/test/results/clientpositive/schema_evol_par_vec_table.q.out b/ql/src/test/results/clientpositive/schema_evol_par_vec_table.q.out new file mode 100644 index 0000000..a6128b6 --- /dev/null +++ b/ql/src/test/results/clientpositive/schema_evol_par_vec_table.q.out @@ -0,0 +1,357 @@ +PREHOOK: query: drop table test_alter +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table test_alter +POSTHOOK: type: DROPTABLE +PREHOOK: query: drop table test_alter2 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table test_alter2 +POSTHOOK: type: DROPTABLE +PREHOOK: query: drop table test_alter3 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table test_alter3 +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_alter (id string) stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_alter +POSTHOOK: query: create table test_alter (id string) stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_alter +PREHOOK: query: insert into test_alter values ('1'), ('2'), ('3') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@test_alter +POSTHOOK: query: insert into test_alter values ('1'), ('2'), ('3') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@test_alter +POSTHOOK: Lineage: test_alter.id SCRIPT [] +PREHOOK: query: select * from 
test_alter +PREHOOK: type: QUERY +PREHOOK: Input: default@test_alter +#### A masked pattern was here #### +POSTHOOK: query: select * from test_alter +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test_alter +#### A masked pattern was here #### +1 +2 +3 +PREHOOK: query: alter table test_alter add columns (newCol string) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@test_alter +PREHOOK: Output: default@test_alter +POSTHOOK: query: alter table test_alter add columns (newCol string) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@test_alter +POSTHOOK: Output: default@test_alter +PREHOOK: query: select * from test_alter +PREHOOK: type: QUERY +PREHOOK: Input: default@test_alter +#### A masked pattern was here #### +POSTHOOK: query: select * from test_alter +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test_alter +#### A masked pattern was here #### +1 NULL +2 NULL +3 NULL +PREHOOK: query: insert into test_alter values ('4', '100') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@test_alter +POSTHOOK: query: insert into test_alter values ('4', '100') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@test_alter +POSTHOOK: Lineage: test_alter.id SCRIPT [] +POSTHOOK: Lineage: test_alter.newcol SCRIPT [] +PREHOOK: query: select * from test_alter +PREHOOK: type: QUERY +PREHOOK: Input: default@test_alter +#### A masked pattern was here #### +POSTHOOK: query: select * from test_alter +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test_alter +#### A masked pattern was here #### +1 NULL +2 NULL +3 NULL +4 100 +PREHOOK: query: alter table test_alter replace columns (id string) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@test_alter +PREHOOK: Output: default@test_alter +POSTHOOK: query: alter table test_alter replace columns (id string) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@test_alter +POSTHOOK: Output: default@test_alter +PREHOOK: query: select * from test_alter +PREHOOK: type: QUERY +PREHOOK: Input: default@test_alter +#### A masked pattern was here #### +POSTHOOK: query: select * from test_alter +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test_alter +#### A masked pattern was here #### +1 +2 +3 +4 +PREHOOK: query: alter table test_alter replace columns (id string, id2 string) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@test_alter +PREHOOK: Output: default@test_alter +POSTHOOK: query: alter table test_alter replace columns (id string, id2 string) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@test_alter +POSTHOOK: Output: default@test_alter +PREHOOK: query: select * from test_alter +PREHOOK: type: QUERY +PREHOOK: Input: default@test_alter +#### A masked pattern was here #### +POSTHOOK: query: select * from test_alter +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test_alter +#### A masked pattern was here #### +1 NULL +2 NULL +3 NULL +4 NULL +PREHOOK: query: insert into test_alter values ('5', '100') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@test_alter +POSTHOOK: query: insert into test_alter values ('5', '100') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@test_alter +POSTHOOK: Lineage: test_alter.id SCRIPT [] +POSTHOOK: Lineage: test_alter.id2 SCRIPT [] +PREHOOK: query: select * from test_alter +PREHOOK: type: QUERY +PREHOOK: Input: default@test_alter +#### A masked pattern was here 
#### +POSTHOOK: query: select * from test_alter +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test_alter +#### A masked pattern was here #### +5 100 +1 NULL +2 NULL +3 NULL +4 NULL +PREHOOK: query: alter table test_alter replace columns (id string, id2 string) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@test_alter +PREHOOK: Output: default@test_alter +POSTHOOK: query: alter table test_alter replace columns (id string, id2 string) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@test_alter +POSTHOOK: Output: default@test_alter +PREHOOK: query: select * from test_alter +PREHOOK: type: QUERY +PREHOOK: Input: default@test_alter +#### A masked pattern was here #### +POSTHOOK: query: select * from test_alter +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test_alter +#### A masked pattern was here #### +5 100 +1 NULL +2 NULL +3 NULL +4 NULL +PREHOOK: query: alter table test_alter replace columns (id char(10), id2 string) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@test_alter +PREHOOK: Output: default@test_alter +POSTHOOK: query: alter table test_alter replace columns (id char(10), id2 string) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@test_alter +POSTHOOK: Output: default@test_alter +PREHOOK: query: select * from test_alter +PREHOOK: type: QUERY +PREHOOK: Input: default@test_alter +#### A masked pattern was here #### +POSTHOOK: query: select * from test_alter +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test_alter +#### A masked pattern was here #### +5 100 +1 NULL +2 NULL +3 NULL +4 NULL +PREHOOK: query: alter table test_alter replace columns (id string, id2 string) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@test_alter +PREHOOK: Output: default@test_alter +POSTHOOK: query: alter table test_alter replace columns (id string, id2 string) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@test_alter +POSTHOOK: Output: default@test_alter +PREHOOK: query: alter table test_alter replace columns (id varchar(10), id2 string) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@test_alter +PREHOOK: Output: default@test_alter +POSTHOOK: query: alter table test_alter replace columns (id varchar(10), id2 string) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@test_alter +POSTHOOK: Output: default@test_alter +PREHOOK: query: select * from test_alter +PREHOOK: type: QUERY +PREHOOK: Input: default@test_alter +#### A masked pattern was here #### +POSTHOOK: query: select * from test_alter +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test_alter +#### A masked pattern was here #### +5 100 +1 NULL +2 NULL +3 NULL +4 NULL +PREHOOK: query: alter table test_alter replace columns (id string, id2 string) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@test_alter +PREHOOK: Output: default@test_alter +POSTHOOK: query: alter table test_alter replace columns (id string, id2 string) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@test_alter +POSTHOOK: Output: default@test_alter +PREHOOK: query: alter table test_alter replace columns (idv varchar(10), id2 string) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@test_alter +PREHOOK: Output: default@test_alter +POSTHOOK: query: alter table test_alter replace columns (idv varchar(10), id2 string) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@test_alter +POSTHOOK: Output: default@test_alter +PREHOOK: query: select * from test_alter +PREHOOK: type: QUERY 
+PREHOOK: Input: default@test_alter +#### A masked pattern was here #### +POSTHOOK: query: select * from test_alter +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test_alter +#### A masked pattern was here #### +NULL 100 +NULL NULL +NULL NULL +NULL NULL +NULL NULL +PREHOOK: query: create table test_alter2 (id int) stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_alter2 +POSTHOOK: query: create table test_alter2 (id int) stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_alter2 +PREHOOK: query: insert into test_alter2 values (1) +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@test_alter2 +POSTHOOK: query: insert into test_alter2 values (1) +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@test_alter2 +POSTHOOK: Lineage: test_alter2.id SCRIPT [] +PREHOOK: query: alter table test_alter2 replace columns (id bigint) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@test_alter2 +PREHOOK: Output: default@test_alter2 +POSTHOOK: query: alter table test_alter2 replace columns (id bigint) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@test_alter2 +POSTHOOK: Output: default@test_alter2 +PREHOOK: query: select * from test_alter2 +PREHOOK: type: QUERY +PREHOOK: Input: default@test_alter2 +#### A masked pattern was here #### +POSTHOOK: query: select * from test_alter2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test_alter2 +#### A masked pattern was here #### +1 +PREHOOK: query: drop table test_alter2 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@test_alter2 +PREHOOK: Output: default@test_alter2 +POSTHOOK: query: drop table test_alter2 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@test_alter2 +POSTHOOK: Output: default@test_alter2 +PREHOOK: query: create table test_alter2 (id float) stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_alter2 +POSTHOOK: query: create table test_alter2 (id float) stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_alter2 +PREHOOK: query: insert into test_alter2 values (1.5) +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@test_alter2 +POSTHOOK: query: insert into test_alter2 values (1.5) +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@test_alter2 +POSTHOOK: Lineage: test_alter2.id SCRIPT [] +PREHOOK: query: alter table test_alter2 replace columns (id double) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@test_alter2 +PREHOOK: Output: default@test_alter2 +POSTHOOK: query: alter table test_alter2 replace columns (id double) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@test_alter2 +POSTHOOK: Output: default@test_alter2 +PREHOOK: query: select * from test_alter2 +PREHOOK: type: QUERY +PREHOOK: Input: default@test_alter2 +#### A masked pattern was here #### +POSTHOOK: query: select * from test_alter2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test_alter2 +#### A masked pattern was here #### +1.5 +PREHOOK: query: create table test_alter3 (id1 tinyint, id2 smallint, id3 int, id4 bigint) stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_alter3 +POSTHOOK: query: create table test_alter3 (id1 
tinyint, id2 smallint, id3 int, id4 bigint) stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_alter3 +PREHOOK: query: insert into test_alter3 values (10, 20, 30, 40) +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@test_alter3 +POSTHOOK: query: insert into test_alter3 values (10, 20, 30, 40) +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@test_alter3 +POSTHOOK: Lineage: test_alter3.id1 SCRIPT [] +POSTHOOK: Lineage: test_alter3.id2 SCRIPT [] +POSTHOOK: Lineage: test_alter3.id3 SCRIPT [] +POSTHOOK: Lineage: test_alter3.id4 SCRIPT [] +PREHOOK: query: alter table test_alter3 replace columns (id1 smallint, id2 int, id3 bigint, id4 decimal(10,4)) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@test_alter3 +PREHOOK: Output: default@test_alter3 +POSTHOOK: query: alter table test_alter3 replace columns (id1 smallint, id2 int, id3 bigint, id4 decimal(10,4)) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@test_alter3 +POSTHOOK: Output: default@test_alter3 +PREHOOK: query: select id1, id2, id3 from test_alter3 +PREHOOK: type: QUERY +PREHOOK: Input: default@test_alter3 +#### A masked pattern was here #### +POSTHOOK: query: select id1, id2, id3 from test_alter3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test_alter3 +#### A masked pattern was here #### +10 20 30 diff --git a/ql/src/test/results/clientpositive/schema_evol_par_vec_table_dictionary_encoding.q.out b/ql/src/test/results/clientpositive/schema_evol_par_vec_table_dictionary_encoding.q.out new file mode 100644 index 0000000..1d2f36d --- /dev/null +++ b/ql/src/test/results/clientpositive/schema_evol_par_vec_table_dictionary_encoding.q.out @@ -0,0 +1,522 @@ +PREHOOK: query: drop table test_alter +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table test_alter +POSTHOOK: type: DROPTABLE +PREHOOK: query: drop table test_alter2 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table test_alter2 +POSTHOOK: type: DROPTABLE +PREHOOK: query: drop table test_alter3 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table test_alter3 +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_alter (id string) stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_alter +POSTHOOK: query: create table test_alter (id string) stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_alter +PREHOOK: query: insert into test_alter values ('1'), ('2'), ('3') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@test_alter +POSTHOOK: query: insert into test_alter values ('1'), ('2'), ('3') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@test_alter +POSTHOOK: Lineage: test_alter.id SCRIPT [] +PREHOOK: query: select * from test_alter +PREHOOK: type: QUERY +PREHOOK: Input: default@test_alter +#### A masked pattern was here #### +POSTHOOK: query: select * from test_alter +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test_alter +#### A masked pattern was here #### +1 +2 +3 +PREHOOK: query: alter table test_alter add columns (newCol string) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@test_alter +PREHOOK: Output: default@test_alter +POSTHOOK: query: alter table test_alter add columns (newCol string) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: 
default@test_alter +POSTHOOK: Output: default@test_alter +PREHOOK: query: select * from test_alter +PREHOOK: type: QUERY +PREHOOK: Input: default@test_alter +#### A masked pattern was here #### +POSTHOOK: query: select * from test_alter +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test_alter +#### A masked pattern was here #### +1 NULL +2 NULL +3 NULL +PREHOOK: query: insert into test_alter values ('4', '100') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@test_alter +POSTHOOK: query: insert into test_alter values ('4', '100') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@test_alter +POSTHOOK: Lineage: test_alter.id SCRIPT [] +POSTHOOK: Lineage: test_alter.newcol SCRIPT [] +PREHOOK: query: select * from test_alter +PREHOOK: type: QUERY +PREHOOK: Input: default@test_alter +#### A masked pattern was here #### +POSTHOOK: query: select * from test_alter +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test_alter +#### A masked pattern was here #### +1 NULL +2 NULL +3 NULL +4 100 +PREHOOK: query: alter table test_alter replace columns (id string) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@test_alter +PREHOOK: Output: default@test_alter +POSTHOOK: query: alter table test_alter replace columns (id string) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@test_alter +POSTHOOK: Output: default@test_alter +PREHOOK: query: select * from test_alter +PREHOOK: type: QUERY +PREHOOK: Input: default@test_alter +#### A masked pattern was here #### +POSTHOOK: query: select * from test_alter +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test_alter +#### A masked pattern was here #### +1 +2 +3 +4 +PREHOOK: query: alter table test_alter replace columns (id string, id2 string) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@test_alter +PREHOOK: Output: default@test_alter +POSTHOOK: query: alter table test_alter replace columns (id string, id2 string) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@test_alter +POSTHOOK: Output: default@test_alter +PREHOOK: query: select * from test_alter +PREHOOK: type: QUERY +PREHOOK: Input: default@test_alter +#### A masked pattern was here #### +POSTHOOK: query: select * from test_alter +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test_alter +#### A masked pattern was here #### +1 NULL +2 NULL +3 NULL +4 NULL +PREHOOK: query: insert into test_alter values ('5', '100') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@test_alter +POSTHOOK: query: insert into test_alter values ('5', '100') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@test_alter +POSTHOOK: Lineage: test_alter.id SCRIPT [] +POSTHOOK: Lineage: test_alter.id2 SCRIPT [] +PREHOOK: query: select * from test_alter +PREHOOK: type: QUERY +PREHOOK: Input: default@test_alter +#### A masked pattern was here #### +POSTHOOK: query: select * from test_alter +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test_alter +#### A masked pattern was here #### +5 100 +1 NULL +2 NULL +3 NULL +4 NULL +PREHOOK: query: alter table test_alter replace columns (id string, id2 string) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@test_alter +PREHOOK: Output: default@test_alter +POSTHOOK: query: alter table test_alter replace columns (id string, id2 string) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@test_alter +POSTHOOK: Output: 
default@test_alter +PREHOOK: query: select * from test_alter +PREHOOK: type: QUERY +PREHOOK: Input: default@test_alter +#### A masked pattern was here #### +POSTHOOK: query: select * from test_alter +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test_alter +#### A masked pattern was here #### +5 100 +1 NULL +2 NULL +3 NULL +4 NULL +PREHOOK: query: alter table test_alter replace columns (id char(10), id2 string) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@test_alter +PREHOOK: Output: default@test_alter +POSTHOOK: query: alter table test_alter replace columns (id char(10), id2 string) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@test_alter +POSTHOOK: Output: default@test_alter +PREHOOK: query: select * from test_alter +PREHOOK: type: QUERY +PREHOOK: Input: default@test_alter +#### A masked pattern was here #### +POSTHOOK: query: select * from test_alter +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test_alter +#### A masked pattern was here #### +5 100 +1 NULL +2 NULL +3 NULL +4 NULL +PREHOOK: query: alter table test_alter replace columns (id string, id2 string) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@test_alter +PREHOOK: Output: default@test_alter +POSTHOOK: query: alter table test_alter replace columns (id string, id2 string) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@test_alter +POSTHOOK: Output: default@test_alter +PREHOOK: query: alter table test_alter replace columns (id varchar(10), id2 string) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@test_alter +PREHOOK: Output: default@test_alter +POSTHOOK: query: alter table test_alter replace columns (id varchar(10), id2 string) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@test_alter +POSTHOOK: Output: default@test_alter +PREHOOK: query: select * from test_alter +PREHOOK: type: QUERY +PREHOOK: Input: default@test_alter +#### A masked pattern was here #### +POSTHOOK: query: select * from test_alter +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test_alter +#### A masked pattern was here #### +5 100 +1 NULL +2 NULL +3 NULL +4 NULL +PREHOOK: query: alter table test_alter replace columns (id string, id2 string) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@test_alter +PREHOOK: Output: default@test_alter +POSTHOOK: query: alter table test_alter replace columns (id string, id2 string) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@test_alter +POSTHOOK: Output: default@test_alter +PREHOOK: query: alter table test_alter replace columns (idv varchar(10), id2 string) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@test_alter +PREHOOK: Output: default@test_alter +POSTHOOK: query: alter table test_alter replace columns (idv varchar(10), id2 string) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@test_alter +POSTHOOK: Output: default@test_alter +PREHOOK: query: select * from test_alter +PREHOOK: type: QUERY +PREHOOK: Input: default@test_alter +#### A masked pattern was here #### +POSTHOOK: query: select * from test_alter +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test_alter +#### A masked pattern was here #### +NULL 100 +NULL NULL +NULL NULL +NULL NULL +NULL NULL +PREHOOK: query: create table test_alter2 (id int) stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_alter2 +POSTHOOK: query: create table test_alter2 (id int) stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: 
database:default +POSTHOOK: Output: default@test_alter2 +PREHOOK: query: insert into test_alter2 values (1) +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@test_alter2 +POSTHOOK: query: insert into test_alter2 values (1) +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@test_alter2 +POSTHOOK: Lineage: test_alter2.id SCRIPT [] +PREHOOK: query: alter table test_alter2 replace columns (id bigint) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@test_alter2 +PREHOOK: Output: default@test_alter2 +POSTHOOK: query: alter table test_alter2 replace columns (id bigint) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@test_alter2 +POSTHOOK: Output: default@test_alter2 +PREHOOK: query: select * from test_alter2 +PREHOOK: type: QUERY +PREHOOK: Input: default@test_alter2 +#### A masked pattern was here #### +POSTHOOK: query: select * from test_alter2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test_alter2 +#### A masked pattern was here #### +1 +PREHOOK: query: drop table test_alter2 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@test_alter2 +PREHOOK: Output: default@test_alter2 +POSTHOOK: query: drop table test_alter2 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@test_alter2 +POSTHOOK: Output: default@test_alter2 +PREHOOK: query: create table test_alter2 (id float) stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_alter2 +POSTHOOK: query: create table test_alter2 (id float) stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_alter2 +PREHOOK: query: insert into test_alter2 values (1.5) +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@test_alter2 +POSTHOOK: query: insert into test_alter2 values (1.5) +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@test_alter2 +POSTHOOK: Lineage: test_alter2.id SCRIPT [] +PREHOOK: query: alter table test_alter2 replace columns (id double) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@test_alter2 +PREHOOK: Output: default@test_alter2 +POSTHOOK: query: alter table test_alter2 replace columns (id double) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@test_alter2 +POSTHOOK: Output: default@test_alter2 +PREHOOK: query: select * from test_alter2 +PREHOOK: type: QUERY +PREHOOK: Input: default@test_alter2 +#### A masked pattern was here #### +POSTHOOK: query: select * from test_alter2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test_alter2 +#### A masked pattern was here #### +1.5 +PREHOOK: query: drop table test_alter2 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@test_alter2 +PREHOOK: Output: default@test_alter2 +POSTHOOK: query: drop table test_alter2 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@test_alter2 +POSTHOOK: Output: default@test_alter2 +PREHOOK: query: create table test_alter2 (ts timestamp) stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_alter2 +POSTHOOK: query: create table test_alter2 (ts timestamp) stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_alter2 +PREHOOK: query: insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456') +PREHOOK: type: QUERY +PREHOOK: 
Input: _dummy_database@_dummy_table +PREHOOK: Output: default@test_alter2 +POSTHOOK: query: insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@test_alter2 +POSTHOOK: Lineage: test_alter2.ts SCRIPT [] +PREHOOK: query: select * from test_alter2 +PREHOOK: type: QUERY +PREHOOK: Input: default@test_alter2 +#### A masked pattern was here #### +POSTHOOK: query: select * from test_alter2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test_alter2 +#### A masked pattern was here #### +2018-01-01 13:14:15.123456 +2018-01-02 14:15:16.123456 +2018-01-03 16:17:18.123456 +PREHOOK: query: alter table test_alter2 replace columns (ts string) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@test_alter2 +PREHOOK: Output: default@test_alter2 +POSTHOOK: query: alter table test_alter2 replace columns (ts string) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@test_alter2 +POSTHOOK: Output: default@test_alter2 +PREHOOK: query: select * from test_alter2 +PREHOOK: type: QUERY +PREHOOK: Input: default@test_alter2 +#### A masked pattern was here #### +POSTHOOK: query: select * from test_alter2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test_alter2 +#### A masked pattern was here #### +2018-01-01 13:14:15.123456 +2018-01-02 14:15:16.123456 +2018-01-03 16:17:18.123456 +PREHOOK: query: drop table test_alter2 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@test_alter2 +PREHOOK: Output: default@test_alter2 +POSTHOOK: query: drop table test_alter2 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@test_alter2 +POSTHOOK: Output: default@test_alter2 +PREHOOK: query: create table test_alter2 (ts timestamp) stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_alter2 +POSTHOOK: query: create table test_alter2 (ts timestamp) stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_alter2 +PREHOOK: query: insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@test_alter2 +POSTHOOK: query: insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@test_alter2 +POSTHOOK: Lineage: test_alter2.ts SCRIPT [] +PREHOOK: query: select * from test_alter2 +PREHOOK: type: QUERY +PREHOOK: Input: default@test_alter2 +#### A masked pattern was here #### +POSTHOOK: query: select * from test_alter2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test_alter2 +#### A masked pattern was here #### +2018-01-01 13:14:15.123456 +2018-01-02 14:15:16.123456 +2018-01-03 16:17:18.123456 +PREHOOK: query: alter table test_alter2 replace columns (ts varchar(19)) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@test_alter2 +PREHOOK: Output: default@test_alter2 +POSTHOOK: query: alter table test_alter2 replace columns (ts varchar(19)) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@test_alter2 +POSTHOOK: Output: default@test_alter2 +PREHOOK: query: select * from test_alter2 +PREHOOK: type: QUERY +PREHOOK: Input: default@test_alter2 +#### A masked pattern was here #### 
+POSTHOOK: query: select * from test_alter2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+2018-01-01 13:14:15
+2018-01-02 14:15:16
+2018-01-03 16:17:18
+PREHOOK: query: drop table test_alter2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@test_alter2
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: drop table test_alter2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@test_alter2
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: create table test_alter2 (ts timestamp) stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: create table test_alter2 (ts timestamp) stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_alter2
+POSTHOOK: Lineage: test_alter2.ts SCRIPT []
+PREHOOK: query: select * from test_alter2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+2018-01-01 13:14:15.123456
+2018-01-02 14:15:16.123456
+2018-01-03 16:17:18.123456
+PREHOOK: query: alter table test_alter2 replace columns (ts char(25))
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter2
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: alter table test_alter2 replace columns (ts char(25))
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter2
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: select * from test_alter2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+2018-01-01 13:14:15.12345
+2018-01-02 14:15:16.12345
+2018-01-03 16:17:18.12345
+PREHOOK: query: create table test_alter3 (id1 tinyint, id2 smallint, id3 int, id4 bigint) stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_alter3
+POSTHOOK: query: create table test_alter3 (id1 tinyint, id2 smallint, id3 int, id4 bigint) stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_alter3
+PREHOOK: query: insert into test_alter3 values (10, 20, 30, 40)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_alter3
+POSTHOOK: query: insert into test_alter3 values (10, 20, 30, 40)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_alter3
+POSTHOOK: Lineage: test_alter3.id1 SCRIPT []
+POSTHOOK: Lineage: test_alter3.id2 SCRIPT []
+POSTHOOK: Lineage: test_alter3.id3 SCRIPT []
+POSTHOOK: Lineage: test_alter3.id4 SCRIPT []
+PREHOOK: query: alter table test_alter3 replace columns (id1 smallint, id2 int, id3 bigint, id4 decimal(10,4))
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter3
+PREHOOK: Output: default@test_alter3
+POSTHOOK: query: alter table test_alter3 replace columns (id1 smallint, id2 int, id3 bigint, id4 decimal(10,4))
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter3
+POSTHOOK: Output: default@test_alter3
+PREHOOK: query: select id1, id2, id3 from test_alter3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter3
+#### A masked pattern was here ####
+POSTHOOK: query: select id1, id2, id3 from test_alter3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter3
+#### A masked pattern was here ####
+10 20 30
diff --git a/ql/src/test/results/clientpositive/schema_evol_par_vec_table_non_dictionary_encoding.q.out b/ql/src/test/results/clientpositive/schema_evol_par_vec_table_non_dictionary_encoding.q.out
new file mode 100644
index 0000000..1d2f36d
--- /dev/null
+++ b/ql/src/test/results/clientpositive/schema_evol_par_vec_table_non_dictionary_encoding.q.out
@@ -0,0 +1,522 @@
+PREHOOK: query: drop table test_alter
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table test_alter
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table test_alter2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table test_alter2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table test_alter3
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table test_alter3
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_alter (id string) stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: create table test_alter (id string) stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: insert into test_alter values ('1'), ('2'), ('3')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: insert into test_alter values ('1'), ('2'), ('3')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_alter
+POSTHOOK: Lineage: test_alter.id SCRIPT []
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+1
+2
+3
+PREHOOK: query: alter table test_alter add columns (newCol string)
+PREHOOK: type: ALTERTABLE_ADDCOLS
+PREHOOK: Input: default@test_alter
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: alter table test_alter add columns (newCol string)
+POSTHOOK: type: ALTERTABLE_ADDCOLS
+POSTHOOK: Input: default@test_alter
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+1 NULL
+2 NULL
+3 NULL
+PREHOOK: query: insert into test_alter values ('4', '100')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: insert into test_alter values ('4', '100')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_alter
+POSTHOOK: Lineage: test_alter.id SCRIPT []
+POSTHOOK: Lineage: test_alter.newcol SCRIPT []
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+1 NULL
+2 NULL
+3 NULL
+4 100
+PREHOOK: query: alter table test_alter replace columns (id string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: alter table test_alter replace columns (id string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+1
+2
+3
+4
+PREHOOK: query: alter table test_alter replace columns (id string, id2 string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: alter table test_alter replace columns (id string, id2 string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+1 NULL
+2 NULL
+3 NULL
+4 NULL
+PREHOOK: query: insert into test_alter values ('5', '100')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: insert into test_alter values ('5', '100')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_alter
+POSTHOOK: Lineage: test_alter.id SCRIPT []
+POSTHOOK: Lineage: test_alter.id2 SCRIPT []
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+5 100
+1 NULL
+2 NULL
+3 NULL
+4 NULL
+PREHOOK: query: alter table test_alter replace columns (id string, id2 string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: alter table test_alter replace columns (id string, id2 string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+5 100
+1 NULL
+2 NULL
+3 NULL
+4 NULL
+PREHOOK: query: alter table test_alter replace columns (id char(10), id2 string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: alter table test_alter replace columns (id char(10), id2 string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+5 100
+1 NULL
+2 NULL
+3 NULL
+4 NULL
+PREHOOK: query: alter table test_alter replace columns (id string, id2 string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: alter table test_alter replace columns (id string, id2 string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: alter table test_alter replace columns (id varchar(10), id2 string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: alter table test_alter replace columns (id varchar(10), id2 string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+5 100
+1 NULL
+2 NULL
+3 NULL
+4 NULL
+PREHOOK: query: alter table test_alter replace columns (id string, id2 string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: alter table test_alter replace columns (id string, id2 string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: alter table test_alter replace columns (idv varchar(10), id2 string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: alter table test_alter replace columns (idv varchar(10), id2 string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+NULL 100
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+PREHOOK: query: create table test_alter2 (id int) stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: create table test_alter2 (id int) stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: insert into test_alter2 values (1)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: insert into test_alter2 values (1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_alter2
+POSTHOOK: Lineage: test_alter2.id SCRIPT []
+PREHOOK: query: alter table test_alter2 replace columns (id bigint)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter2
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: alter table test_alter2 replace columns (id bigint)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter2
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: select * from test_alter2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+1
+PREHOOK: query: drop table test_alter2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@test_alter2
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: drop table test_alter2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@test_alter2
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: create table test_alter2 (id float) stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: create table test_alter2 (id float) stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: insert into test_alter2 values (1.5)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: insert into test_alter2 values (1.5)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_alter2
+POSTHOOK: Lineage: test_alter2.id SCRIPT []
+PREHOOK: query: alter table test_alter2 replace columns (id double)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter2
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: alter table test_alter2 replace columns (id double)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter2
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: select * from test_alter2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+1.5
+PREHOOK: query: drop table test_alter2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@test_alter2
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: drop table test_alter2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@test_alter2
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: create table test_alter2 (ts timestamp) stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: create table test_alter2 (ts timestamp) stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_alter2
+POSTHOOK: Lineage: test_alter2.ts SCRIPT []
+PREHOOK: query: select * from test_alter2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+2018-01-01 13:14:15.123456
+2018-01-02 14:15:16.123456
+2018-01-03 16:17:18.123456
+PREHOOK: query: alter table test_alter2 replace columns (ts string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter2
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: alter table test_alter2 replace columns (ts string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter2
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: select * from test_alter2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+2018-01-01 13:14:15.123456
+2018-01-02 14:15:16.123456
+2018-01-03 16:17:18.123456
+PREHOOK: query: drop table test_alter2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@test_alter2
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: drop table test_alter2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@test_alter2
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: create table test_alter2 (ts timestamp) stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: create table test_alter2 (ts timestamp) stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_alter2
+POSTHOOK: Lineage: test_alter2.ts SCRIPT []
+PREHOOK: query: select * from test_alter2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+2018-01-01 13:14:15.123456
+2018-01-02 14:15:16.123456
+2018-01-03 16:17:18.123456
+PREHOOK: query: alter table test_alter2 replace columns (ts varchar(19))
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter2
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: alter table test_alter2 replace columns (ts varchar(19))
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter2
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: select * from test_alter2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+2018-01-01 13:14:15
+2018-01-02 14:15:16
+2018-01-03 16:17:18
+PREHOOK: query: drop table test_alter2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@test_alter2
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: drop table test_alter2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@test_alter2
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: create table test_alter2 (ts timestamp) stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: create table test_alter2 (ts timestamp) stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_alter2
+POSTHOOK: Lineage: test_alter2.ts SCRIPT []
+PREHOOK: query: select * from test_alter2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+2018-01-01 13:14:15.123456
+2018-01-02 14:15:16.123456
+2018-01-03 16:17:18.123456
+PREHOOK: query: alter table test_alter2 replace columns (ts char(25))
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter2
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: alter table test_alter2 replace columns (ts char(25))
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter2
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: select * from test_alter2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+2018-01-01 13:14:15.12345
+2018-01-02 14:15:16.12345
+2018-01-03 16:17:18.12345
+PREHOOK: query: create table test_alter3 (id1 tinyint, id2 smallint, id3 int, id4 bigint) stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_alter3
+POSTHOOK: query: create table test_alter3 (id1 tinyint, id2 smallint, id3 int, id4 bigint) stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_alter3
+PREHOOK: query: insert into test_alter3 values (10, 20, 30, 40)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_alter3
+POSTHOOK: query: insert into test_alter3 values (10, 20, 30, 40)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_alter3
+POSTHOOK: Lineage: test_alter3.id1 SCRIPT []
+POSTHOOK: Lineage: test_alter3.id2 SCRIPT []
+POSTHOOK: Lineage: test_alter3.id3 SCRIPT []
+POSTHOOK: Lineage: test_alter3.id4 SCRIPT []
+PREHOOK: query: alter table test_alter3 replace columns (id1 smallint, id2 int, id3 bigint, id4 decimal(10,4))
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter3
+PREHOOK: Output: default@test_alter3
+POSTHOOK: query: alter table test_alter3 replace columns (id1 smallint, id2 int, id3 bigint, id4 decimal(10,4))
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter3
+POSTHOOK: Output: default@test_alter3
+PREHOOK: query: select id1, id2, id3 from test_alter3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter3
+#### A masked pattern was here ####
+POSTHOOK: query: select id1, id2, id3 from test_alter3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter3
+#### A masked pattern was here ####
+10 20 30