diff --git itests/src/test/resources/testconfiguration.properties itests/src/test/resources/testconfiguration.properties
index 08de4a7..cce652f 100644
--- itests/src/test/resources/testconfiguration.properties
+++ itests/src/test/resources/testconfiguration.properties
@@ -753,7 +753,10 @@ minillaplocal.query.files=\
   vector_like_2.q,\
   vector_llap_text_1.q,\
   vector_mapjoin_reduce.q,\
+  vector_null_map.q,\
   vector_number_compare_projection.q,\
+  vector_orc_merge_incompat_schema.q,\
+  vector_orc_null_check.q,\
   vector_order_null.q,\
   vector_outer_reference_windowed.q,\
   vector_partitioned_date_time.q,\
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java
index bd594e6..6926c95 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java
@@ -23,6 +23,7 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.commons.lang.ArrayUtils;
 import org.apache.commons.lang.StringUtils;
@@ -44,6 +45,7 @@
 import org.apache.hadoop.hive.serde2.objectinspector.SettableUnionObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector.StandardUnion;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableBinaryObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableBooleanObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableByteObjectInspector;
@@ -61,10 +63,12 @@
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableShortObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableStringObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
 
 import org.apache.hadoop.io.Text;
 import org.apache.hive.common.util.DateUtils;
@@ -1431,11 +1435,13 @@ private static VectorExpressionWriter genVectorExpressionWritableList(
       SettableListObjectInspector fieldObjInspector) throws HiveException {
 
     return new VectorExpressionWriterList() {
+      private Object obj;
       private VectorExtractRow vectorExtractRow;
       private ListTypeInfo listTypeInfo;
       public VectorExpressionWriter init(SettableListObjectInspector objInspector)
           throws HiveException {
         super.init(objInspector);
+        obj = initValue(null);
         vectorExtractRow = new VectorExtractRow();
         listTypeInfo = (ListTypeInfo)
             TypeInfoUtils.getTypeInfoFromTypeString(objInspector.getTypeName());
@@ -1450,24 +1456,43 @@ public Object initValue(Object ignored) {
 
       @Override
       public Object writeValue(ColumnVector column, int row) throws HiveException {
-        return setValue(null, column, row);
+        final ListColumnVector listColVector = (ListColumnVector) column;
+        final SettableListObjectInspector listOI =
+            (SettableListObjectInspector) this.objectInspector;
+        final List value = (List) vectorExtractRow.extractRowColumn(listColVector,
+            listTypeInfo, listOI, row);
+        if (value == null) {
+          return null;
+        }
+
+        listOI.resize(obj, value.size());
+        for (int i = 0; i < value.size(); i++) {
+          listOI.set(obj, i, value.get(i));
+        }
+        return obj;
       }
 
       @Override
-      public Object setValue(Object row, ColumnVector column, int columnRow)
+      public Object setValue(Object list, ColumnVector column, int row)
           throws HiveException {
+        if (list == null) {
+          list = initValue(null);
+        }
+
+        final ListColumnVector listColVector = (ListColumnVector) column;
         final SettableListObjectInspector listOI =
             (SettableListObjectInspector) this.objectInspector;
-        final List value = (List)vectorExtractRow.extractRowColumn(listColVector,
-            listTypeInfo, listOI, columnRow);
-        if (null == row) {
-          row = ((SettableListObjectInspector) this.objectInspector).create(value.size());
+        final List value = (List) vectorExtractRow.extractRowColumn(listColVector,
+            listTypeInfo, listOI, row);
+        if (value == null) {
+          return null;
         }
+
+        listOI.resize(list, value.size());
         for (int i = 0; i < value.size(); i++) {
-          listOI.set(row, i, value.get(i));
+          listOI.set(list, i, value.get(i));
         }
-        return row;
+        return list;
       }
     }.init(fieldObjInspector);
@@ -1478,10 +1503,15 @@ private static VectorExpressionWriter genVectorExpressionWritableMap(
 
     return new VectorExpressionWriterMap() {
       private Object obj;
+      private VectorExtractRow vectorExtractRow;
+      private MapTypeInfo mapTypeInfo;
 
       public VectorExpressionWriter init(SettableMapObjectInspector objInspector)
           throws HiveException {
         super.init(objInspector);
         obj = initValue(null);
+        vectorExtractRow = new VectorExtractRow();
+        mapTypeInfo = (MapTypeInfo)
+            TypeInfoUtils.getTypeInfoFromTypeString(objInspector.getTypeName());
         return this;
       }
@@ -1493,13 +1523,45 @@ public Object initValue(Object ignored) {
 
       @Override
       public Object writeValue(ColumnVector column, int row) throws HiveException {
-        throw new HiveException("Not implemented yet");
+        final MapColumnVector mapColVector = (MapColumnVector) column;
+        final SettableMapObjectInspector mapOI =
+            (SettableMapObjectInspector) this.objectInspector;
+        final Map value =
+            (Map) vectorExtractRow.extractRowColumn(
+                mapColVector, mapTypeInfo, mapOI, row);
+        if (value == null) {
+          return null;
+        }
+
+        mapOI.clear(obj);
+        for (Map.Entry entry : value.entrySet()) {
+          mapOI.put(obj, entry.getKey(), entry.getValue());
+        }
+        return obj;
       }
 
       @Override
-      public Object setValue(Object row, ColumnVector column, int columnRow)
+      public Object setValue(Object map, ColumnVector column, int row)
           throws HiveException {
-        throw new HiveException("Not implemented yet");
+        if (map == null) {
+          map = initValue(null);
+        }
+
+        final MapColumnVector mapColVector = (MapColumnVector) column;
+        final SettableMapObjectInspector mapOI =
+            (SettableMapObjectInspector) this.objectInspector;
+        final Map value =
+            (Map) vectorExtractRow.extractRowColumn(
+                mapColVector, mapTypeInfo, mapOI, row);
+        if (value == null) {
+          return null;
+        }
+
+        mapOI.clear(map);
+        for (Map.Entry entry : value.entrySet()) {
+          mapOI.put(map, entry.getKey(), entry.getValue());
+        }
+        return map;
       }
     }.init(fieldObjInspector);
   }
@@ -1546,9 +1608,9 @@ public Object writeValue(ColumnVector column, int row) throws HiveException {
       }
 
       @Override
-      public Object setValue(Object field, ColumnVector column, int row) throws HiveException {
-        if (null == field) {
-          field = initValue(null);
+      public Object setValue(Object struct, ColumnVector column, int row) throws HiveException {
+        if (struct == null) {
+          struct = initValue(null);
         }
 
         final StructColumnVector structColVector = (StructColumnVector) column;
@@ -1562,9 +1624,9 @@ public Object setValue(Object field, ColumnVector column, int row) throws HiveEx
           final StructField structField = fields.get(i);
           final Object value = vectorExtractRow.extractRowColumn(structColVector.fields[i],
               fieldTypeInfos.get(i), structField.getFieldObjectInspector(), row);
-          structOI.setStructFieldData(obj, structField, value);
+          structOI.setStructFieldData(struct, structField, value);
         }
-        return field;
+        return struct;
       }
     }.init(fieldObjInspector);
   }
@@ -1574,10 +1636,15 @@ private static VectorExpressionWriter genVectorExpressionWritableUnion(
 
     return new VectorExpressionWriterMap() {
       private Object obj;
+      private VectorExtractRow vectorExtractRow;
+      private UnionTypeInfo unionTypeInfo;
 
      public VectorExpressionWriter init(SettableUnionObjectInspector objInspector)
           throws HiveException {
         super.init(objInspector);
         obj = initValue(null);
+        vectorExtractRow = new VectorExtractRow();
+        unionTypeInfo = (UnionTypeInfo)
+            TypeInfoUtils.getTypeInfoFromTypeString(objInspector.getTypeName());
         return this;
       }
@@ -1589,13 +1656,46 @@ public Object initValue(Object ignored) {
 
       @Override
       public Object writeValue(ColumnVector column, int row) throws HiveException {
-        throw new HiveException("Not implemented yet");
+
+        final UnionColumnVector unionColumnVector = (UnionColumnVector) column;
+        final int tag = unionColumnVector.tags[row];
+        final SettableUnionObjectInspector unionOI =
+            (SettableUnionObjectInspector) this.objectInspector;
+        ObjectInspector fieldOI = unionOI.getObjectInspectors().get(tag);
+        ColumnVector fieldColVector = unionColumnVector.fields[tag];
+        final Object value =
+            vectorExtractRow.extractRowColumn(
+                fieldColVector, unionTypeInfo.getAllUnionObjectTypeInfos().get(tag), fieldOI, row);
+        if (value == null) {
+          return null;
+        }
+
+        unionOI.setFieldAndTag(obj, value, (byte) tag);
+        return obj;
       }
 
       @Override
-      public Object setValue(Object row, ColumnVector column, int columnRow)
+      public Object setValue(Object union, ColumnVector column, int row)
           throws HiveException {
-        throw new HiveException("Not implemented yet");
+        if (union == null) {
+          union = initValue(null);
+        }
+
+        final UnionColumnVector unionColumnVector = (UnionColumnVector) column;
+        final int tag = unionColumnVector.tags[row];
+        final SettableUnionObjectInspector unionOI =
+            (SettableUnionObjectInspector) this.objectInspector;
+        ObjectInspector fieldOI = unionOI.getObjectInspectors().get(tag);
+        ColumnVector fieldColVector = unionColumnVector.fields[tag];
+        final Object value =
+            vectorExtractRow.extractRowColumn(
+                fieldColVector, unionTypeInfo.getAllUnionObjectTypeInfos().get(tag), fieldOI, row);
+        if (value == null) {
+          return null;
+        }
+
+        unionOI.setFieldAndTag(union, value, (byte) tag);
+        return union;
       }
     }.init(fieldObjInspector);
   }
diff --git ql/src/test/queries/clientpositive/nullMap.q ql/src/test/queries/clientpositive/null_map.q
similarity index 85%
rename from ql/src/test/queries/clientpositive/nullMap.q
rename to ql/src/test/queries/clientpositive/null_map.q
index d2784b7..f272bb9 100644
--- ql/src/test/queries/clientpositive/nullMap.q
+++ ql/src/test/queries/clientpositive/null_map.q
@@ -1,3 +1,5 @@
+SET hive.vectorized.execution.enabled=false;
+
 create table map_txt (
   id int,
   content map
diff --git ql/src/test/queries/clientpositive/orc_merge_incompat_schema.q ql/src/test/queries/clientpositive/orc_merge_incompat_schema.q
index 098b41e..2396194 100644
--- ql/src/test/queries/clientpositive/orc_merge_incompat_schema.q
+++ ql/src/test/queries/clientpositive/orc_merge_incompat_schema.q
@@ -1,3 +1,5 @@
+SET hive.vectorized.execution.enabled=false;
+
 set hive.metastore.disallow.incompatible.col.type.changes=false;
 
 CREATE TABLE orc_create_staging (
diff --git ql/src/test/queries/clientpositive/orc_null_check.q ql/src/test/queries/clientpositive/orc_null_check.q
index 2cb1190..e5453fc 100644
--- ql/src/test/queries/clientpositive/orc_null_check.q
+++ ql/src/test/queries/clientpositive/orc_null_check.q
@@ -1,3 +1,5 @@
+SET hive.vectorized.execution.enabled=false;
+
 create table listtable(l array<string>);
 create table listtable_orc(l array<string>) stored as orc;
 
diff --git ql/src/test/queries/clientpositive/vector_null_map.q ql/src/test/queries/clientpositive/vector_null_map.q
new file mode 100644
index 0000000..bda6705
--- /dev/null
+++ ql/src/test/queries/clientpositive/vector_null_map.q
@@ -0,0 +1,21 @@
+SET hive.vectorized.execution.enabled=true;
+set hive.fetch.task.conversion=none;
+
+create table map_txt (
+  id int,
+  content map
+)
+row format delimited
+null defined as '\\N'
+stored as textfile
+;
+
+LOAD DATA LOCAL INPATH '../../data/files/mapNull.txt' INTO TABLE map_txt;
+
+explain vectorization expression
+select * from map_txt;
+select * from map_txt;
+
+explain vectorization expression
+select id, map_keys(content) from map_txt;
+select id, map_keys(content) from map_txt;
diff --git ql/src/test/queries/clientpositive/vector_orc_merge_incompat_schema.q ql/src/test/queries/clientpositive/vector_orc_merge_incompat_schema.q
new file mode 100644
index 0000000..b03a084
--- /dev/null
+++ ql/src/test/queries/clientpositive/vector_orc_merge_incompat_schema.q
@@ -0,0 +1,53 @@
+SET hive.vectorized.execution.enabled=true;
+
+set hive.metastore.disallow.incompatible.col.type.changes=false;
+
+CREATE TABLE orc_create_staging (
+  str STRING,
+  mp  MAP<STRING,STRING>,
+  lst ARRAY<STRING>,
+  strct STRUCT<A:STRING,B:STRING>
+) ROW FORMAT DELIMITED
+    FIELDS TERMINATED BY '|'
+    COLLECTION ITEMS TERMINATED BY ','
+    MAP KEYS TERMINATED BY ':';
+
+LOAD DATA LOCAL INPATH '../../data/files/orc_create.txt' OVERWRITE INTO TABLE orc_create_staging;
+
+CREATE TABLE orc_create_complex (
+  str STRING,
+  mp  MAP<STRING,STRING>,
+  lst ARRAY<STRING>,
+  strct STRUCT<A:STRING,B:STRING>,
+  val INT
+) STORED AS ORC tblproperties("orc.row.index.stride"="1000", "orc.stripe.size"="1000", "orc.compress.size"="10000");
+
+INSERT OVERWRITE TABLE orc_create_complex SELECT str,mp,lst,strct,0 FROM orc_create_staging;
+INSERT INTO TABLE orc_create_complex SELECT str,mp,lst,strct,0 FROM orc_create_staging;
+
+dfs -ls ${hiveconf:hive.metastore.warehouse.dir}/orc_create_complex/;
+select sum(hash(*)) from orc_create_complex;
+
+-- will be merged as the schema is the same
+ALTER TABLE orc_create_complex CONCATENATE;
+
+dfs -ls ${hiveconf:hive.metastore.warehouse.dir}/orc_create_complex/;
+select sum(hash(*)) from orc_create_complex;
+
+ALTER TABLE orc_create_complex
+CHANGE COLUMN strct strct STRUCT<A:STRING,B:STRING,C:STRING>;
+
+EXPLAIN VECTORIZATION
+INSERT INTO TABLE orc_create_complex SELECT str,mp,lst,NAMED_STRUCT('A',strct.A,'B',strct.B,'C','c'),0 FROM orc_create_staging;
+INSERT INTO TABLE orc_create_complex SELECT str,mp,lst,NAMED_STRUCT('A',strct.A,'B',strct.B,'C','c'),0 FROM orc_create_staging;
+
+dfs -ls ${hiveconf:hive.metastore.warehouse.dir}/orc_create_complex/;
+EXPLAIN VECTORIZATION
+select sum(hash(*)) from orc_create_complex;
+select sum(hash(*)) from orc_create_complex;
+
+-- schema is different for both files, will not be merged
+ALTER TABLE orc_create_complex CONCATENATE;
+
+dfs -ls ${hiveconf:hive.metastore.warehouse.dir}/orc_create_complex/;
+select sum(hash(*)) from orc_create_complex;
diff --git ql/src/test/queries/clientpositive/vector_orc_null_check.q ql/src/test/queries/clientpositive/vector_orc_null_check.q
new file mode 100644
index 0000000..8f415c3
--- /dev/null
+++ ql/src/test/queries/clientpositive/vector_orc_null_check.q
@@ -0,0 +1,13 @@
+SET hive.vectorized.execution.enabled=true;
+set hive.fetch.task.conversion=none;
+
+create table listtable(l array<string>);
+create table listtable_orc(l array<string>) stored as orc;
+
+insert overwrite table listtable select array(null) from src;
+insert overwrite table listtable_orc select * from listtable;
+
+explain vectorization expression
+select size(l) from listtable_orc limit 10;
+select size(l) from listtable_orc limit 10;
+
diff --git ql/src/test/results/clientpositive/llap/vector_null_map.q.out ql/src/test/results/clientpositive/llap/vector_null_map.q.out
new file mode 100644
index 0000000..666f7fd
--- /dev/null
+++ ql/src/test/results/clientpositive/llap/vector_null_map.q.out
@@ -0,0 +1,173 @@
+PREHOOK: query: create table map_txt (
+  id int,
+  content map
+)
+row format delimited
+null defined as '\\N'
+stored as textfile
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@map_txt
+POSTHOOK: query: create table map_txt (
+  id int,
+  content map
+)
+row format delimited
+null defined as '\\N'
+stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@map_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/mapNull.txt' INTO TABLE map_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@map_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/mapNull.txt' INTO TABLE map_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@map_txt
+PREHOOK: query: explain vectorization expression
+select * from map_txt
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization expression
+select * from map_txt
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: map_txt
+                  Statistics: Num rows: 1 Data size: 744 Basic stats: COMPLETE Column stats: NONE
+                  TableScan Vectorization:
+                      native: true
+                  Select Operator
+                    expressions: id (type: int), content (type: map)
+                    outputColumnNames: _col0, _col1
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
+                        projectedOutputColumnNums: [0, 1]
+                    Statistics: Num rows: 1 Data size: 744 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      File Sink Vectorization:
+                          className: VectorFileSinkOperator
+                          native: false
+                      Statistics: Num rows: 1 Data size: 744 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized, llap
+            LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
+                featureSupportInUse: []
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select * from map_txt
+PREHOOK: type: QUERY
+PREHOOK: Input: default@map_txt
+#### A masked pattern was here ####
+POSTHOOK: query: select * from map_txt
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@map_txt
+#### A masked pattern was here ####
+1	NULL
+PREHOOK: query: explain vectorization expression
+select id, map_keys(content) from map_txt
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization expression
+select id, map_keys(content) from map_txt
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: map_txt
+                  Statistics: Num rows: 1 Data size: 744 Basic stats: COMPLETE Column stats: NONE
+                  TableScan Vectorization:
+                      native: true
+                  Select Operator
+                    expressions: id (type: int), map_keys(content) (type: array)
+                    outputColumnNames: _col0, _col1
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
+                        projectedOutputColumnNums: [0, 3]
+                        selectExpressions: VectorUDFAdaptor(map_keys(content)) -> 3:array
+                    Statistics: Num rows: 1 Data size: 744 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      File Sink Vectorization:
+                          className: VectorFileSinkOperator
+                          native: false
+                      Statistics: Num rows: 1 Data size: 744 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized, llap
+            LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
+                featureSupportInUse: []
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                allNative: false
+                usesVectorUDFAdaptor: true
+                vectorized: true
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select id, map_keys(content) from map_txt
+PREHOOK: type: QUERY
+PREHOOK: Input: default@map_txt
+#### A masked pattern was here ####
+POSTHOOK: query: select id, map_keys(content) from map_txt
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@map_txt
+#### A masked pattern was here ####
+1	[]
diff --git ql/src/test/results/clientpositive/llap/vector_orc_merge_incompat_schema.q.out ql/src/test/results/clientpositive/llap/vector_orc_merge_incompat_schema.q.out
new file mode 100644
index 0000000..ebf6853
--- /dev/null
+++ ql/src/test/results/clientpositive/llap/vector_orc_merge_incompat_schema.q.out
@@ -0,0 +1,305 @@
+PREHOOK: query: CREATE TABLE orc_create_staging (
+  str STRING,
+  mp  MAP<STRING,STRING>,
+  lst ARRAY<STRING>,
+  strct STRUCT<A:STRING,B:STRING>
+) ROW FORMAT DELIMITED
+    FIELDS TERMINATED BY '|'
+    COLLECTION ITEMS TERMINATED BY ','
+    MAP KEYS TERMINATED BY ':'
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@orc_create_staging
+POSTHOOK: query: CREATE TABLE orc_create_staging (
+  str STRING,
+  mp  MAP<STRING,STRING>,
+  lst ARRAY<STRING>,
+  strct STRUCT<A:STRING,B:STRING>
+) ROW FORMAT DELIMITED
+    FIELDS TERMINATED BY '|'
+    COLLECTION ITEMS TERMINATED BY ','
+    MAP KEYS TERMINATED BY ':'
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@orc_create_staging
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/orc_create.txt' OVERWRITE INTO TABLE orc_create_staging
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@orc_create_staging
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/orc_create.txt' OVERWRITE INTO TABLE orc_create_staging
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@orc_create_staging
+PREHOOK: query: CREATE TABLE orc_create_complex (
+  str STRING,
+  mp  MAP<STRING,STRING>,
+  lst ARRAY<STRING>,
+  strct STRUCT<A:STRING,B:STRING>,
+  val INT
+) STORED AS ORC tblproperties("orc.row.index.stride"="1000", "orc.stripe.size"="1000", "orc.compress.size"="10000")
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@orc_create_complex
+POSTHOOK: query: CREATE TABLE orc_create_complex (
+  str STRING,
+  mp  MAP<STRING,STRING>,
+  lst ARRAY<STRING>,
+  strct STRUCT<A:STRING,B:STRING>,
+  val INT
+) STORED AS ORC tblproperties("orc.row.index.stride"="1000", "orc.stripe.size"="1000", "orc.compress.size"="10000")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@orc_create_complex
+PREHOOK: query: INSERT OVERWRITE TABLE orc_create_complex SELECT str,mp,lst,strct,0 FROM orc_create_staging
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_create_staging
+PREHOOK: Output: default@orc_create_complex
+POSTHOOK: query: INSERT OVERWRITE TABLE orc_create_complex SELECT str,mp,lst,strct,0 FROM orc_create_staging
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_create_staging
+POSTHOOK: Output: default@orc_create_complex
+POSTHOOK: Lineage: orc_create_complex.lst SIMPLE [(orc_create_staging)orc_create_staging.FieldSchema(name:lst, type:array<string>, comment:null), ]
+POSTHOOK: Lineage: orc_create_complex.mp SIMPLE [(orc_create_staging)orc_create_staging.FieldSchema(name:mp, type:map<string,string>, comment:null), ]
+POSTHOOK: Lineage: orc_create_complex.str SIMPLE [(orc_create_staging)orc_create_staging.FieldSchema(name:str, type:string, comment:null), ]
+POSTHOOK: Lineage: orc_create_complex.strct SIMPLE [(orc_create_staging)orc_create_staging.FieldSchema(name:strct, type:struct<a:string,b:string>, comment:null), ]
+POSTHOOK: Lineage: orc_create_complex.val SIMPLE []
+PREHOOK: query: INSERT INTO TABLE orc_create_complex SELECT str,mp,lst,strct,0 FROM orc_create_staging
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_create_staging
+PREHOOK: Output: default@orc_create_complex
+POSTHOOK: query: INSERT INTO TABLE orc_create_complex SELECT str,mp,lst,strct,0 FROM orc_create_staging
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_create_staging
+POSTHOOK: Output: default@orc_create_complex
+POSTHOOK: Lineage: orc_create_complex.lst SIMPLE [(orc_create_staging)orc_create_staging.FieldSchema(name:lst, type:array<string>, comment:null), ]
+POSTHOOK: Lineage: orc_create_complex.mp SIMPLE [(orc_create_staging)orc_create_staging.FieldSchema(name:mp, type:map<string,string>, comment:null), ]
+POSTHOOK: Lineage: orc_create_complex.str SIMPLE [(orc_create_staging)orc_create_staging.FieldSchema(name:str, type:string, comment:null), ]
+POSTHOOK: Lineage: orc_create_complex.strct SIMPLE [(orc_create_staging)orc_create_staging.FieldSchema(name:strct, type:struct<a:string,b:string>, comment:null), ]
+POSTHOOK: Lineage: orc_create_complex.val SIMPLE []
+Found 2 items
+#### A masked pattern was here ####
+PREHOOK: query: select sum(hash(*)) from orc_create_complex
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_create_complex
+#### A masked pattern was here ####
+POSTHOOK: query: select sum(hash(*)) from orc_create_complex
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_create_complex
+#### A masked pattern was here ####
+953053114
+PREHOOK: query: ALTER TABLE orc_create_complex CONCATENATE
+PREHOOK: type: ALTER_TABLE_MERGE
+PREHOOK: Input: default@orc_create_complex
+PREHOOK: Output: default@orc_create_complex
+POSTHOOK: query: ALTER TABLE orc_create_complex CONCATENATE
+POSTHOOK: type: ALTER_TABLE_MERGE
+POSTHOOK: Input: default@orc_create_complex
+POSTHOOK: Output: default@orc_create_complex
+Found 1 items
+#### A masked pattern was here ####
+PREHOOK: query: select sum(hash(*)) from orc_create_complex
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_create_complex
+#### A masked pattern was here ####
+POSTHOOK: query: select sum(hash(*)) from orc_create_complex
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_create_complex
+#### A masked pattern was here ####
+953053114
+PREHOOK: query: ALTER TABLE orc_create_complex
+CHANGE COLUMN strct strct STRUCT<A:STRING,B:STRING,C:STRING>
+PREHOOK: type: ALTERTABLE_RENAMECOL
+PREHOOK: Input: default@orc_create_complex
+PREHOOK: Output: default@orc_create_complex
+POSTHOOK: query: ALTER TABLE orc_create_complex
+CHANGE COLUMN strct strct STRUCT<A:STRING,B:STRING,C:STRING>
+POSTHOOK: type: ALTERTABLE_RENAMECOL
+POSTHOOK: Input: default@orc_create_complex
+POSTHOOK: Output: default@orc_create_complex
+PREHOOK: query: EXPLAIN VECTORIZATION
+INSERT INTO TABLE orc_create_complex SELECT str,mp,lst,NAMED_STRUCT('A',strct.A,'B',strct.B,'C','c'),0 FROM orc_create_staging
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN VECTORIZATION
+INSERT INTO TABLE orc_create_complex SELECT str,mp,lst,NAMED_STRUCT('A',strct.A,'B',strct.B,'C','c'),0 FROM orc_create_staging
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: orc_create_staging
+                  Pruned Column Paths: strct.a, strct.b
+                  Statistics: Num rows: 1 Data size: 3440 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: str (type: string), mp (type: map<string,string>), lst (type: array<string>), named_struct('A',strct.a,'B',strct.b,'C','c') (type: struct<a:string,b:string,c:string>), 0 (type: int)
+                    outputColumnNames: _col0, _col1, _col2, _col3, _col4
+                    Statistics: Num rows: 1 Data size: 3440 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 1 Data size: 3440 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
+                          serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
+                          name: default.orc_create_complex
+            Execution mode: llap
+            LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                notVectorizedReason: SELECT operator: Unable to vectorize custom UDF. Encountered unsupported expr desc : Column[strct].a
+                vectorized: false
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+              output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
+              serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
+              name: default.orc_create_complex
+
+  Stage: Stage-3
+    Stats Work
+      Basic Stats Work:
+
+PREHOOK: query: INSERT INTO TABLE orc_create_complex SELECT str,mp,lst,NAMED_STRUCT('A',strct.A,'B',strct.B,'C','c'),0 FROM orc_create_staging
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_create_staging
+PREHOOK: Output: default@orc_create_complex
+POSTHOOK: query: INSERT INTO TABLE orc_create_complex SELECT str,mp,lst,NAMED_STRUCT('A',strct.A,'B',strct.B,'C','c'),0 FROM orc_create_staging
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_create_staging
+POSTHOOK: Output: default@orc_create_complex
+POSTHOOK: Lineage: orc_create_complex.lst SIMPLE [(orc_create_staging)orc_create_staging.FieldSchema(name:lst, type:array<string>, comment:null), ]
+POSTHOOK: Lineage: orc_create_complex.mp SIMPLE [(orc_create_staging)orc_create_staging.FieldSchema(name:mp, type:map<string,string>, comment:null), ]
+POSTHOOK: Lineage: orc_create_complex.str SIMPLE [(orc_create_staging)orc_create_staging.FieldSchema(name:str, type:string, comment:null), ]
+POSTHOOK: Lineage: orc_create_complex.strct EXPRESSION [(orc_create_staging)orc_create_staging.FieldSchema(name:strct, type:struct<a:string,b:string>, comment:null), ]
+POSTHOOK: Lineage: orc_create_complex.val SIMPLE []
+Found 2 items
+#### A masked pattern was here ####
+PREHOOK: query: EXPLAIN VECTORIZATION
+select sum(hash(*)) from orc_create_complex
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN VECTORIZATION
+select sum(hash(*)) from orc_create_complex
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: orc_create_complex
+                  Statistics: Num rows: 6 Data size: 21816 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: hash(str,mp,lst,strct,val) (type: int)
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 6 Data size: 21816 Basic stats: COMPLETE Column stats: NONE
+                    Group By Operator
+                      aggregations: sum(_col0)
+                      mode: hash
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 1 Data size: 3644 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order:
+                        Statistics: Num rows: 1 Data size: 3644 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: bigint)
+            Execution mode: vectorized, llap
+            LLAP IO: all inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+                inputFormatFeatureSupport: []
+                featureSupportInUse: []
+                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                allNative: false
+                usesVectorUDFAdaptor: true
+                vectorized: true
+        Reducer 2
+            Execution mode: vectorized, llap
+            Reduce Vectorization:
+                enabled: true
+                enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: sum(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 3644 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 3644 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select sum(hash(*)) from orc_create_complex
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_create_complex
+#### A masked pattern was here ####
+POSTHOOK: query: select sum(hash(*)) from orc_create_complex
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_create_complex
+#### A masked pattern was here ####
+4334574594
+PREHOOK: query: ALTER TABLE orc_create_complex CONCATENATE
+PREHOOK: type: ALTER_TABLE_MERGE
+PREHOOK: Input: default@orc_create_complex
+PREHOOK: Output: default@orc_create_complex
+POSTHOOK: query: ALTER TABLE orc_create_complex CONCATENATE
+POSTHOOK: type: ALTER_TABLE_MERGE
+POSTHOOK: Input: default@orc_create_complex
+POSTHOOK: Output: default@orc_create_complex
+Found 2 items
+#### A masked pattern was here ####
+PREHOOK: query: select sum(hash(*)) from orc_create_complex
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_create_complex
+#### A masked pattern was here ####
+POSTHOOK: query: select sum(hash(*)) from orc_create_complex
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_create_complex
+#### A masked pattern was here ####
+4334574594
diff --git ql/src/test/results/clientpositive/llap/vector_orc_null_check.q.out ql/src/test/results/clientpositive/llap/vector_orc_null_check.q.out
new file mode 100644
index 0000000..4c2c4a0
--- /dev/null
+++ ql/src/test/results/clientpositive/llap/vector_orc_null_check.q.out
@@ -0,0 +1,121 @@
+PREHOOK: query: create table listtable(l array<string>)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@listtable
+POSTHOOK: query: create table listtable(l array<string>)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@listtable
+PREHOOK: query: create table listtable_orc(l array<string>) stored as orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@listtable_orc
+POSTHOOK: query: create table listtable_orc(l array<string>) stored as orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@listtable_orc
+PREHOOK: query: insert overwrite table listtable select array(null) from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@listtable
+POSTHOOK: query: insert overwrite table listtable select array(null) from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@listtable
+POSTHOOK: Lineage: listtable.l EXPRESSION []
+PREHOOK: query: insert overwrite table listtable_orc select * from listtable
+PREHOOK: type: QUERY
+PREHOOK: Input: default@listtable
+PREHOOK: Output: default@listtable_orc
+POSTHOOK: query: insert overwrite table listtable_orc select * from listtable
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@listtable
+POSTHOOK: Output: default@listtable_orc
+POSTHOOK: Lineage: listtable_orc.l SIMPLE [(listtable)listtable.FieldSchema(name:l, type:array<string>, comment:null), ]
+PREHOOK: query: explain vectorization expression
+select size(l) from listtable_orc limit 10
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization expression
+select size(l) from listtable_orc limit 10
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: listtable_orc
+                  Statistics: Num rows: 500 Data size: 913920 Basic stats: COMPLETE Column stats: NONE
+                  TableScan Vectorization:
+                      native: true
+                  Select Operator
+                    expressions: size(l) (type: int)
+                    outputColumnNames: _col0
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
+                        projectedOutputColumnNums: [2]
+                        selectExpressions: VectorUDFAdaptor(size(l)) -> 2:int
+                    Statistics: Num rows: 500 Data size: 913920 Basic stats: COMPLETE Column stats: NONE
+                    Limit
+                      Number of rows: 10
+                      Limit Vectorization:
+                          className: VectorLimitOperator
+                          native: true
+                      Statistics: Num rows: 10 Data size: 18270 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        File Sink Vectorization:
+                            className: VectorFileSinkOperator
+                            native: false
+                        Statistics: Num rows: 10 Data size: 18270 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized, llap
+            LLAP IO: all inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+                inputFormatFeatureSupport: []
+                featureSupportInUse: []
+                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                allNative: false
+                usesVectorUDFAdaptor: true
+                vectorized: true
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 10
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select size(l) from listtable_orc limit 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@listtable_orc
+#### A masked pattern was here ####
+POSTHOOK: query: select size(l) from listtable_orc limit 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@listtable_orc
+#### A masked pattern was here ####
+-1
+-1
+-1
+-1
+-1
+-1
+-1
+-1
+-1
+-1
diff --git ql/src/test/results/clientpositive/nullMap.q.out ql/src/test/results/clientpositive/null_map.q.out
similarity index 100%
rename from ql/src/test/results/clientpositive/nullMap.q.out
rename to ql/src/test/results/clientpositive/null_map.q.out
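
Note on the list/map writers patched above: the change replaces the "Not implemented yet" paths (and the per-row allocation in the list writer) with writers that extract the row's value through VectorExtractRow, return null when the row's value is null, and copy the value into a container created once in init() and reused for every row. The following is a minimal standalone sketch of that contract using plain JDK types; RowSource is a hypothetical stand-in for VectorExtractRow, and none of these names are Hive classes.

import java.util.HashMap;
import java.util.Map;

public class ReusableMapWriterSketch {

  // Stand-in for VectorExtractRow.extractRowColumn(): yields the deserialized
  // value for one row, or null when the row's map is null.
  interface RowSource {
    Map<String, String> extract(int row);
  }

  // One container allocated up front and reused for every row, mirroring the
  // 'obj = initValue(null)' field in the patched writers.
  private final Map<String, String> obj = new HashMap<>();

  // Mirrors the new writeValue(): a null row value now propagates as null
  // instead of throwing; a non-null value is copied into the cleared,
  // reused container (compare mapOI.clear(obj) / mapOI.put(obj, k, v)).
  public Map<String, String> writeValue(RowSource source, int row) {
    final Map<String, String> value = source.extract(row);
    if (value == null) {
      return null;
    }
    obj.clear();
    obj.putAll(value);
    return obj;
  }

  // Mirrors the new setValue(): the caller may supply its own container,
  // which is lazily created when null; a null row value still yields null.
  public Map<String, String> setValue(Map<String, String> map, RowSource source, int row) {
    if (map == null) {
      map = new HashMap<>();
    }
    final Map<String, String> value = source.extract(row);
    if (value == null) {
      return null;
    }
    map.clear();
    map.putAll(value);
    return map;
  }
}

Because the container is reused, a consumer must copy or fully process the returned object before the writer moves to the next row; this is the same trade-off the pre-existing struct writer already makes with its reused obj field.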
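The union writer follows the same null-propagation rule but adds tag dispatch: the row's tag selects which child field to read, and the extracted value is stored back together with the tag (setFieldAndTag). A compact sketch of that dispatch, again with illustrative stand-in types rather than Hive's UnionColumnVector or SettableUnionObjectInspector:

public class UnionWriterSketch {

  // Stand-in for UnionColumnVector: a per-row tag plus one value array per
  // child type; fields[tag][row] holds row's value when tags[row] == tag.
  static final class UnionColumn {
    final int[] tags;
    final Object[][] fields;
    UnionColumn(int[] tags, Object[][] fields) {
      this.tags = tags;
      this.fields = fields;
    }
  }

  // Reusable holder, mirroring unionOI.setFieldAndTag(obj, value, (byte) tag).
  static final class UnionValue {
    byte tag;
    Object value;
  }

  private final UnionValue obj = new UnionValue();

  // Mirrors the new writeValue(): pick the child by tag, read the row's value
  // from that child, propagate null, otherwise store value and tag in the
  // reused holder.
  UnionValue writeValue(UnionColumn column, int row) {
    final int tag = column.tags[row];
    final Object value = column.fields[tag][row];
    if (value == null) {
      return null;
    }
    obj.tag = (byte) tag;
    obj.value = value;
    return obj;
  }
}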