diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java index d759739..7585bad 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java @@ -427,6 +427,12 @@ //{2} should be lockid LOCK_ACQUIRE_TIMEDOUT(10307, "Lock acquisition for {0} timed out after {1}ms. {2}", true), COMPILE_LOCK_TIMED_OUT(10308, "Attempt to acquire compile lock timed out.", true), + CANNOT_CHANGE_SERDE(10309, "Changing SerDe (from {0}) is not supported for table {1}. File format may be incompatible", true), + CANNOT_CHANGE_FILEFORMAT(10310, "Changing file format (from {0}) is not supported for table {1}", true), + CANNOT_REORDER_COLUMNS(10311, "Reordering columns is not supported for table {0}. SerDe may be incompatible", true), + CANNOT_CHANGE_COLUMN_TYPE(10312, "Changing from type {0} to {1} is not supported for column {2}. SerDe may be incompatible", true), + REPLACE_CANNOT_DROP_COLUMNS(10313, "Replacing columns cannot drop columns for table {0}. SerDe may be incompatible", true), + REPLACE_UNSUPPORTED_TYPE_CONVERSION(10314, "Replacing columns with unsupported type conversion (from {0} to {1}) for column {2}. SerDe may be incompatible", true), //========================== 20000 range starts here ========================// SCRIPT_INIT_ERROR(20000, "Unable to initialize custom script."), SCRIPT_IO_ERROR(20001, "An error occurred while reading or writing to your custom script. 
" diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java index 30cae88..ac0ecd9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java @@ -18,10 +18,15 @@ package org.apache.hadoop.hive.ql.exec; +import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import org.apache.commons.lang.StringEscapeUtils; import org.apache.commons.lang.StringUtils; +import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat; +import org.apache.hadoop.hive.ql.io.orc.OrcSerde; +import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo; import org.apache.hadoop.mapreduce.MRJobConfig; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -3295,6 +3300,11 @@ private int alterTableOrSinglePartition(AlterTableDesc alterTbl, Table tbl, Part String comment = alterTbl.getNewColComment(); boolean first = alterTbl.getFirst(); String afterCol = alterTbl.getAfterCol(); + // if orc table, restrict reordering columns as it will break schema evolution + boolean isOrc = sd.getInputFormat().equals(OrcInputFormat.class.getName()); + if (isOrc && (first || (afterCol != null && !afterCol.trim().isEmpty()))) { + throw new HiveException(ErrorMsg.CANNOT_REORDER_COLUMNS, alterTbl.getOldName()); + } FieldSchema column = null; boolean found = false; @@ -3311,6 +3321,12 @@ private int alterTableOrSinglePartition(AlterTableDesc alterTbl, Table tbl, Part && !oldColName.equalsIgnoreCase(oldName)) { throw new HiveException(ErrorMsg.DUPLICATE_COLUMN_NAMES, newName); } else if (oldColName.equalsIgnoreCase(oldName)) { + // if orc table, restrict changing column types. Only integer type promotion is supported. 
+ // smallint -> int -> bigint + if (isOrc && !isSupportedTypeChange(col.getType(), type)) { + throw new HiveException(ErrorMsg.CANNOT_CHANGE_COLUMN_TYPE, col.getType(), type, + newName); + } col.setName(newName); if (type != null && !type.trim().equals("")) { col.setType(type); } @@ -3362,9 +3378,29 @@ private int alterTableOrSinglePartition(AlterTableDesc alterTbl, Table tbl, Part && !serializationLib.equals(LazySimpleSerDe.class.getName()) && !serializationLib.equals(ColumnarSerDe.class.getName()) && !serializationLib.equals(DynamicSerDe.class.getName()) - && !serializationLib.equals(ParquetHiveSerDe.class.getName())) { + && !serializationLib.equals(ParquetHiveSerDe.class.getName()) + && !serializationLib.equals(OrcSerde.class.getName())) { throw new HiveException(ErrorMsg.CANNOT_REPLACE_COLUMNS, alterTbl.getOldName()); } + final boolean isOrc = serializationLib.equals(OrcSerde.class.getName()); + // adding columns and limited integer type promotion is supported for ORC + if (isOrc) { + final List<FieldSchema> existingCols = sd.getCols(); + final List<FieldSchema> replaceCols = alterTbl.getNewCols(); + + if (replaceCols.size() < existingCols.size()) { + throw new HiveException(ErrorMsg.REPLACE_CANNOT_DROP_COLUMNS, alterTbl.getOldName()); + } + + for (int i = 0; i < existingCols.size(); i++) { + final String currentColType = existingCols.get(i).getType().toLowerCase().trim(); + final String newColType = replaceCols.get(i).getType().toLowerCase().trim(); + if (!isSupportedTypeChange(currentColType, newColType)) { + throw new HiveException(ErrorMsg.REPLACE_UNSUPPORTED_TYPE_CONVERSION, currentColType, + newColType, replaceCols.get(i).getName()); + } + } + } sd.setCols(alterTbl.getNewCols()); } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDPROPS) { tbl.getTTable().getParameters().putAll(alterTbl.getProps()); @@ -3380,6 +3416,12 @@ private int alterTableOrSinglePartition(AlterTableDesc alterTbl, Table tbl, Part StorageDescriptor sd = (part == null ?
tbl.getTTable().getSd() : part.getTPartition().getSd()); String serdeName = alterTbl.getSerdeName(); String oldSerdeName = sd.getSerdeInfo().getSerializationLib(); + // if orc table, restrict changing the serde as it can break schema evolution + if (oldSerdeName.equalsIgnoreCase(OrcSerde.class.getName()) && + !serdeName.equalsIgnoreCase(OrcSerde.class.getName())) { + throw new HiveException(ErrorMsg.CANNOT_CHANGE_SERDE, OrcSerde.class.getSimpleName(), + alterTbl.getOldName()); + } sd.getSerdeInfo().setSerializationLib(serdeName); if ((alterTbl.getProps() != null) && (alterTbl.getProps().size() > 0)) { sd.getSerdeInfo().getParameters().putAll(alterTbl.getProps()); } @@ -3404,6 +3446,11 @@ private int alterTableOrSinglePartition(AlterTableDesc alterTbl, Table tbl, Part } } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDFILEFORMAT) { StorageDescriptor sd = (part == null ? tbl.getTTable().getSd() : part.getTPartition().getSd()); + // if orc table, restrict changing the file format as it can break schema evolution + if (sd.getInputFormat().equals(OrcInputFormat.class.getName()) + && !alterTbl.getInputFormat().equals(OrcInputFormat.class.getName())) { + throw new HiveException(ErrorMsg.CANNOT_CHANGE_FILEFORMAT, "ORC", alterTbl.getOldName()); + } sd.setInputFormat(alterTbl.getInputFormat()); sd.setOutputFormat(alterTbl.getOutputFormat()); if (alterTbl.getSerdeName() != null) { @@ -3510,6 +3557,45 @@ private int alterTableOrSinglePartition(AlterTableDesc alterTbl, Table tbl, Part return 0; } + + // don't change the order of enums as ordinal values are used to check for valid type promotions + enum PromotableTypes { + SMALLINT, + INT, + BIGINT; + + static List<String> types() { + return ImmutableList.of(SMALLINT.toString().toLowerCase(), + INT.toString().toLowerCase(), BIGINT.toString().toLowerCase()); + } + } + + // for ORC, only supported type promotions are smallint -> int -> bigint.
No other + // type promotions are supported at this point + private boolean isSupportedTypeChange(String currentType, String newType) { + if (currentType != null && newType != null) { + currentType = currentType.toLowerCase().trim(); + newType = newType.toLowerCase().trim(); + // no type change + if (currentType.equals(newType)) { + return true; + } + if (PromotableTypes.types().contains(currentType) + && PromotableTypes.types().contains(newType)) { + PromotableTypes pCurrentType = PromotableTypes.valueOf(currentType.toUpperCase()); + PromotableTypes pNewType = PromotableTypes.valueOf(newType.toUpperCase()); + if (pNewType.ordinal() >= pCurrentType.ordinal()) { + return true; + } else { + return false; + } + } else { + return false; + } + } + return true; + } + /** * Drop a given table or some partitions. DropTableDesc is currently used for both. * diff --git a/ql/src/test/queries/clientnegative/orc_change_fileformat.q b/ql/src/test/queries/clientnegative/orc_change_fileformat.q new file mode 100644 index 0000000..5b2a7e6 --- /dev/null +++ b/ql/src/test/queries/clientnegative/orc_change_fileformat.q @@ -0,0 +1,2 @@ +create table src_orc (key tinyint, val string) stored as orc; +alter table src_orc set fileformat textfile; diff --git a/ql/src/test/queries/clientnegative/orc_change_serde.q b/ql/src/test/queries/clientnegative/orc_change_serde.q new file mode 100644 index 0000000..e7b70fd --- /dev/null +++ b/ql/src/test/queries/clientnegative/orc_change_serde.q @@ -0,0 +1,2 @@ +create table src_orc (key tinyint, val string) stored as orc; +alter table src_orc set serde 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'; diff --git a/ql/src/test/queries/clientnegative/orc_reorder_columns1.q b/ql/src/test/queries/clientnegative/orc_reorder_columns1.q new file mode 100644 index 0000000..2f43ddb --- /dev/null +++ b/ql/src/test/queries/clientnegative/orc_reorder_columns1.q @@ -0,0 +1,2 @@ +create table src_orc (key tinyint, val string) stored as orc; +alter table 
src_orc change key k tinyint first; diff --git a/ql/src/test/queries/clientnegative/orc_reorder_columns2.q b/ql/src/test/queries/clientnegative/orc_reorder_columns2.q new file mode 100644 index 0000000..3634d2d --- /dev/null +++ b/ql/src/test/queries/clientnegative/orc_reorder_columns2.q @@ -0,0 +1,2 @@ +create table src_orc (key tinyint, val string) stored as orc; +alter table src_orc change key k tinyint after val; diff --git a/ql/src/test/queries/clientnegative/orc_replace_columns1.q b/ql/src/test/queries/clientnegative/orc_replace_columns1.q new file mode 100644 index 0000000..e5f9449 --- /dev/null +++ b/ql/src/test/queries/clientnegative/orc_replace_columns1.q @@ -0,0 +1,2 @@ +create table src_orc (key tinyint, val string) stored as orc; +alter table src_orc replace columns (k int); diff --git a/ql/src/test/queries/clientnegative/orc_replace_columns2.q b/ql/src/test/queries/clientnegative/orc_replace_columns2.q new file mode 100644 index 0000000..cc6076d --- /dev/null +++ b/ql/src/test/queries/clientnegative/orc_replace_columns2.q @@ -0,0 +1,2 @@ +create table src_orc (key tinyint, val string) stored as orc; +alter table src_orc replace columns (k smallint, val string); diff --git a/ql/src/test/queries/clientnegative/orc_replace_columns3.q b/ql/src/test/queries/clientnegative/orc_replace_columns3.q new file mode 100644 index 0000000..57d3c9b --- /dev/null +++ b/ql/src/test/queries/clientnegative/orc_replace_columns3.q @@ -0,0 +1,3 @@ +create table src_orc (key smallint, val string) stored as orc; +alter table src_orc replace columns (k int, val string, z smallint); +alter table src_orc replace columns (k int, val string, z tinyint); diff --git a/ql/src/test/queries/clientnegative/orc_type_promotion1.q b/ql/src/test/queries/clientnegative/orc_type_promotion1.q new file mode 100644 index 0000000..e465b2a --- /dev/null +++ b/ql/src/test/queries/clientnegative/orc_type_promotion1.q @@ -0,0 +1,2 @@ +create table src_orc (key tinyint, val string) stored as orc; 
+alter table src_orc change key key float; diff --git a/ql/src/test/queries/clientnegative/orc_type_promotion2.q b/ql/src/test/queries/clientnegative/orc_type_promotion2.q new file mode 100644 index 0000000..a294bee --- /dev/null +++ b/ql/src/test/queries/clientnegative/orc_type_promotion2.q @@ -0,0 +1,9 @@ +create table src_orc (key smallint, val string) stored as orc; +desc src_orc; +alter table src_orc change key key smallint; +desc src_orc; +alter table src_orc change key key int; +desc src_orc; +alter table src_orc change key key bigint; +desc src_orc; +alter table src_orc change val val char(100); diff --git a/ql/src/test/queries/clientnegative/orc_type_promotion3.q b/ql/src/test/queries/clientnegative/orc_type_promotion3.q new file mode 100644 index 0000000..3d85bcee --- /dev/null +++ b/ql/src/test/queries/clientnegative/orc_type_promotion3.q @@ -0,0 +1,2 @@ +create table src_orc (key tinyint, val string) stored as orc; +alter table src_orc change key key smallint; diff --git a/ql/src/test/queries/clientpositive/dbtxnmgr_ddl1.q b/ql/src/test/queries/clientpositive/dbtxnmgr_ddl1.q index 23076a9..a1be063 100644 --- a/ql/src/test/queries/clientpositive/dbtxnmgr_ddl1.q +++ b/ql/src/test/queries/clientpositive/dbtxnmgr_ddl1.q @@ -41,7 +41,6 @@ alter table T4 add partition (ds='tomorrow'); create table T5 (a string, b int); alter table T5 set fileformat orc; -alter table T4 partition (ds='tomorrow') set fileformat RCFILE; create table T7 (a string, b int); alter table T7 set location 'file:///tmp'; diff --git a/ql/src/test/queries/clientpositive/load_orc_part.q b/ql/src/test/queries/clientpositive/load_orc_part.q index 2902c72..281ce4b 100644 --- a/ql/src/test/queries/clientpositive/load_orc_part.q +++ b/ql/src/test/queries/clientpositive/load_orc_part.q @@ -12,13 +12,3 @@ dfs -ls ${hiveconf:hive.metastore.warehouse.dir}/orc_test/ds=10/; load data local inpath '../../data/files/orc_split_elim.orc' overwrite into table orc_staging; load data inpath 
'${hiveconf:hive.metastore.warehouse.dir}/orc_staging/' overwrite into table orc_test partition (ds='10'); dfs -ls ${hiveconf:hive.metastore.warehouse.dir}/orc_test/ds=10/; - -alter table orc_test add partition(ds='11'); -alter table orc_test partition(ds='11') set fileformat textfile; -load data local inpath '../../data/files/kv1.txt' into table orc_test partition(ds='11'); -dfs -ls ${hiveconf:hive.metastore.warehouse.dir}/orc_test/ds=11/; - -alter table orc_test add partition(ds='12'); -alter table orc_test partition(ds='12') set fileformat textfile; -load data local inpath '../../data/files/types/primitives' into table orc_test partition(ds='12'); -dfs -ls ${hiveconf:hive.metastore.warehouse.dir}/orc_test/ds=12/; diff --git a/ql/src/test/queries/clientpositive/orc_analyze.q b/ql/src/test/queries/clientpositive/orc_analyze.q index 71903a2..4cbe1e6 100644 --- a/ql/src/test/queries/clientpositive/orc_analyze.q +++ b/ql/src/test/queries/clientpositive/orc_analyze.q @@ -182,28 +182,14 @@ STORED AS orc; INSERT OVERWRITE TABLE orc_create_people PARTITION (state) SELECT * FROM orc_create_people_staging ORDER BY id; --- set the table to text format -ALTER TABLE orc_create_people SET SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'; -ALTER TABLE orc_create_people SET FILEFORMAT TEXTFILE; - --- load the text data into a new partition -LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE orc_create_people PARTITION(state="OH"); - --- set the table back to orc -ALTER TABLE orc_create_people SET SERDE 'org.apache.hadoop.hive.ql.io.orc.OrcSerde'; -ALTER TABLE orc_create_people SET FILEFORMAT ORC; - set hive.stats.autogather = true; analyze table orc_create_people partition(state) compute statistics; desc formatted orc_create_people partition(state="Ca"); -desc formatted orc_create_people partition(state="OH"); analyze table orc_create_people partition(state) compute statistics partialscan; desc formatted orc_create_people partition(state="Ca"); 
-desc formatted orc_create_people partition(state="OH"); analyze table orc_create_people partition(state) compute statistics noscan; desc formatted orc_create_people partition(state="Ca"); -desc formatted orc_create_people partition(state="OH"); drop table orc_create_people; diff --git a/ql/src/test/queries/clientpositive/orc_int_type_promotion.q b/ql/src/test/queries/clientpositive/orc_int_type_promotion.q index 3dfed2b..a5a2308 100644 --- a/ql/src/test/queries/clientpositive/orc_int_type_promotion.q +++ b/ql/src/test/queries/clientpositive/orc_int_type_promotion.q @@ -53,24 +53,12 @@ alter table alltypes_orc change si si bigint; alter table alltypes_orc change i i bigint; select * from alltypes_orc; -alter table alltypes_orc change l l array; -select * from alltypes_orc; - set hive.vectorized.execution.enabled=true; set hive.fetch.task.conversion=none; -alter table alltypes_orc change si si smallint; -alter table alltypes_orc change i i int; explain select ti, si, i, bi from alltypes_orc; select ti, si, i, bi from alltypes_orc; -alter table alltypes_orc change si si int; -select ti, si, i, bi from alltypes_orc; - -alter table alltypes_orc change si si bigint; -alter table alltypes_orc change i i bigint; -select ti, si, i, bi from alltypes_orc; - set hive.exec.dynamic.partition.mode=nonstrict; create table src_part_orc (key int, value string) partitioned by (ds string) stored as orc; insert overwrite table src_part_orc partition(ds) select key, value, ds from srcpart where ds is not null; diff --git a/ql/src/test/queries/clientpositive/orc_schema_evolution.q b/ql/src/test/queries/clientpositive/orc_schema_evolution.q new file mode 100644 index 0000000..285acf4 --- /dev/null +++ b/ql/src/test/queries/clientpositive/orc_schema_evolution.q @@ -0,0 +1,39 @@ +set hive.fetch.task.conversion=none; +create table src_orc (key smallint, val string) stored as orc; +create table src_orc2 (key smallint, val string) stored as orc; + +-- integer type widening +insert overwrite 
table src_orc select * from src; +select sum(hash(*)) from src_orc; + +alter table src_orc change key key smallint; +select sum(hash(*)) from src_orc; + +alter table src_orc change key key int; +select sum(hash(*)) from src_orc; + +alter table src_orc change key key bigint; +select sum(hash(*)) from src_orc; + +-- replace columns for adding columns and type widening +insert overwrite table src_orc2 select * from src; +select sum(hash(*)) from src_orc2; + +alter table src_orc2 replace columns (k smallint, v string); +select sum(hash(*)) from src_orc2; + +alter table src_orc2 replace columns (k int, v string); +select sum(hash(*)) from src_orc2; + +alter table src_orc2 replace columns (k bigint, v string); +select sum(hash(*)) from src_orc2; + +alter table src_orc2 replace columns (k bigint, v string, z int); +select sum(hash(*)) from src_orc2; + +alter table src_orc2 replace columns (k bigint, v string, z bigint); +select sum(hash(*)) from src_orc2; + +alter table src_orc2 replace columns (k bigint, v string, z bigint, y float); +select sum(hash(*)) from src_orc2; + diff --git a/ql/src/test/queries/clientpositive/vectorization_short_regress.q b/ql/src/test/queries/clientpositive/vectorization_short_regress.q index 88ed00d..3772329 100644 --- a/ql/src/test/queries/clientpositive/vectorization_short_regress.q +++ b/ql/src/test/queries/clientpositive/vectorization_short_regress.q @@ -892,8 +892,19 @@ select count(i) from test_count; select count(i) from test_count; -create table alltypesnull like alltypesorc; -alter table alltypesnull set fileformat textfile; +CREATE TABLE alltypesnull( + ctinyint TINYINT, + csmallint SMALLINT, + cint INT, + cbigint BIGINT, + cfloat FLOAT, + cdouble DOUBLE, + cstring1 STRING, + cstring2 STRING, + ctimestamp1 TIMESTAMP, + ctimestamp2 TIMESTAMP, + cboolean1 BOOLEAN, + cboolean2 BOOLEAN); insert into table alltypesnull select null, null, null, null, null, null, null, null, null, null, null, null from alltypesorc; diff --git 
a/ql/src/test/results/clientnegative/orc_change_fileformat.q.out b/ql/src/test/results/clientnegative/orc_change_fileformat.q.out new file mode 100644 index 0000000..db454fe --- /dev/null +++ b/ql/src/test/results/clientnegative/orc_change_fileformat.q.out @@ -0,0 +1,13 @@ +PREHOOK: query: create table src_orc (key tinyint, val string) stored as orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@src_orc +POSTHOOK: query: create table src_orc (key tinyint, val string) stored as orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@src_orc +PREHOOK: query: alter table src_orc set fileformat textfile +PREHOOK: type: ALTERTABLE_FILEFORMAT +PREHOOK: Input: default@src_orc +PREHOOK: Output: default@src_orc +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Changing file format (from ORC) is not supported for table default.src_orc diff --git a/ql/src/test/results/clientnegative/orc_change_serde.q.out b/ql/src/test/results/clientnegative/orc_change_serde.q.out new file mode 100644 index 0000000..7f882b5 --- /dev/null +++ b/ql/src/test/results/clientnegative/orc_change_serde.q.out @@ -0,0 +1,13 @@ +PREHOOK: query: create table src_orc (key tinyint, val string) stored as orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@src_orc +POSTHOOK: query: create table src_orc (key tinyint, val string) stored as orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@src_orc +PREHOOK: query: alter table src_orc set serde 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe' +PREHOOK: type: ALTERTABLE_SERIALIZER +PREHOOK: Input: default@src_orc +PREHOOK: Output: default@src_orc +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Changing SerDe (from OrcSerde) is not supported for table default.src_orc. 
File format may be incompatible diff --git a/ql/src/test/results/clientnegative/orc_reorder_columns1.q.out b/ql/src/test/results/clientnegative/orc_reorder_columns1.q.out new file mode 100644 index 0000000..c581f4e --- /dev/null +++ b/ql/src/test/results/clientnegative/orc_reorder_columns1.q.out @@ -0,0 +1,13 @@ +PREHOOK: query: create table src_orc (key tinyint, val string) stored as orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@src_orc +POSTHOOK: query: create table src_orc (key tinyint, val string) stored as orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@src_orc +PREHOOK: query: alter table src_orc change key k tinyint first +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: default@src_orc +PREHOOK: Output: default@src_orc +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Reordering columns is not supported for table default.src_orc. SerDe may be incompatible diff --git a/ql/src/test/results/clientnegative/orc_reorder_columns2.q.out b/ql/src/test/results/clientnegative/orc_reorder_columns2.q.out new file mode 100644 index 0000000..54dcdec --- /dev/null +++ b/ql/src/test/results/clientnegative/orc_reorder_columns2.q.out @@ -0,0 +1,13 @@ +PREHOOK: query: create table src_orc (key tinyint, val string) stored as orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@src_orc +POSTHOOK: query: create table src_orc (key tinyint, val string) stored as orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@src_orc +PREHOOK: query: alter table src_orc change key k tinyint after val +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: default@src_orc +PREHOOK: Output: default@src_orc +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Reordering columns is not supported for table default.src_orc. 
SerDe may be incompatible diff --git a/ql/src/test/results/clientnegative/orc_replace_columns.q.out b/ql/src/test/results/clientnegative/orc_replace_columns.q.out new file mode 100644 index 0000000..296a069 --- /dev/null +++ b/ql/src/test/results/clientnegative/orc_replace_columns.q.out @@ -0,0 +1,13 @@ +PREHOOK: query: create table src_orc (key tinyint, val string) stored as orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@src_orc +POSTHOOK: query: create table src_orc (key tinyint, val string) stored as orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@src_orc +PREHOOK: query: alter table src_orc replace columns (k int) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@src_orc +PREHOOK: Output: default@src_orc +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Replace columns is not supported for table default.src_orc. SerDe may be incompatible. diff --git a/ql/src/test/results/clientnegative/orc_replace_columns1.q.out b/ql/src/test/results/clientnegative/orc_replace_columns1.q.out new file mode 100644 index 0000000..13f3f14 --- /dev/null +++ b/ql/src/test/results/clientnegative/orc_replace_columns1.q.out @@ -0,0 +1,13 @@ +PREHOOK: query: create table src_orc (key tinyint, val string) stored as orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@src_orc +POSTHOOK: query: create table src_orc (key tinyint, val string) stored as orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@src_orc +PREHOOK: query: alter table src_orc replace columns (k int) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@src_orc +PREHOOK: Output: default@src_orc +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Replacing columns cannot drop columns for table default.src_orc. 
SerDe may be incompatible diff --git a/ql/src/test/results/clientnegative/orc_replace_columns2.q.out b/ql/src/test/results/clientnegative/orc_replace_columns2.q.out new file mode 100644 index 0000000..d581c79 --- /dev/null +++ b/ql/src/test/results/clientnegative/orc_replace_columns2.q.out @@ -0,0 +1,13 @@ +PREHOOK: query: create table src_orc (key tinyint, val string) stored as orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@src_orc +POSTHOOK: query: create table src_orc (key tinyint, val string) stored as orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@src_orc +PREHOOK: query: alter table src_orc replace columns (k smallint, val string) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@src_orc +PREHOOK: Output: default@src_orc +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Replacing columns with unsupported type conversion (from tinyint to smallint) for column k. 
SerDe may be incompatible diff --git a/ql/src/test/results/clientnegative/orc_replace_columns3.q.out b/ql/src/test/results/clientnegative/orc_replace_columns3.q.out new file mode 100644 index 0000000..2deb3cb --- /dev/null +++ b/ql/src/test/results/clientnegative/orc_replace_columns3.q.out @@ -0,0 +1,21 @@ +PREHOOK: query: create table src_orc (key smallint, val string) stored as orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@src_orc +POSTHOOK: query: create table src_orc (key smallint, val string) stored as orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@src_orc +PREHOOK: query: alter table src_orc replace columns (k int, val string, z smallint) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@src_orc +PREHOOK: Output: default@src_orc +POSTHOOK: query: alter table src_orc replace columns (k int, val string, z smallint) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@src_orc +POSTHOOK: Output: default@src_orc +PREHOOK: query: alter table src_orc replace columns (k int, val string, z tinyint) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@src_orc +PREHOOK: Output: default@src_orc +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Replacing columns with unsupported type conversion (from smallint to tinyint) for column z. 
SerDe may be incompatible diff --git a/ql/src/test/results/clientnegative/orc_type_promotion1.q.out b/ql/src/test/results/clientnegative/orc_type_promotion1.q.out new file mode 100644 index 0000000..aa3b64a --- /dev/null +++ b/ql/src/test/results/clientnegative/orc_type_promotion1.q.out @@ -0,0 +1,13 @@ +PREHOOK: query: create table src_orc (key tinyint, val string) stored as orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@src_orc +POSTHOOK: query: create table src_orc (key tinyint, val string) stored as orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@src_orc +PREHOOK: query: alter table src_orc change key key float +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: default@src_orc +PREHOOK: Output: default@src_orc +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Changing from type tinyint to float is not supported for column key. SerDe may be incompatible diff --git a/ql/src/test/results/clientnegative/orc_type_promotion2.q.out b/ql/src/test/results/clientnegative/orc_type_promotion2.q.out new file mode 100644 index 0000000..44ac44f --- /dev/null +++ b/ql/src/test/results/clientnegative/orc_type_promotion2.q.out @@ -0,0 +1,69 @@ +PREHOOK: query: create table src_orc (key smallint, val string) stored as orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@src_orc +POSTHOOK: query: create table src_orc (key smallint, val string) stored as orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@src_orc +PREHOOK: query: desc src_orc +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@src_orc +POSTHOOK: query: desc src_orc +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@src_orc +key smallint +val string +PREHOOK: query: alter table src_orc change key key smallint +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: default@src_orc +PREHOOK: Output: 
default@src_orc +POSTHOOK: query: alter table src_orc change key key smallint +POSTHOOK: type: ALTERTABLE_RENAMECOL +POSTHOOK: Input: default@src_orc +POSTHOOK: Output: default@src_orc +PREHOOK: query: desc src_orc +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@src_orc +POSTHOOK: query: desc src_orc +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@src_orc +key smallint +val string +PREHOOK: query: alter table src_orc change key key int +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: default@src_orc +PREHOOK: Output: default@src_orc +POSTHOOK: query: alter table src_orc change key key int +POSTHOOK: type: ALTERTABLE_RENAMECOL +POSTHOOK: Input: default@src_orc +POSTHOOK: Output: default@src_orc +PREHOOK: query: desc src_orc +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@src_orc +POSTHOOK: query: desc src_orc +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@src_orc +key int +val string +PREHOOK: query: alter table src_orc change key key bigint +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: default@src_orc +PREHOOK: Output: default@src_orc +POSTHOOK: query: alter table src_orc change key key bigint +POSTHOOK: type: ALTERTABLE_RENAMECOL +POSTHOOK: Input: default@src_orc +POSTHOOK: Output: default@src_orc +PREHOOK: query: desc src_orc +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@src_orc +POSTHOOK: query: desc src_orc +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@src_orc +key bigint +val string +PREHOOK: query: alter table src_orc change val val char(100) +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: default@src_orc +PREHOOK: Output: default@src_orc +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Changing from type string to char(100) is not supported for column val. 
SerDe may be incompatible diff --git a/ql/src/test/results/clientnegative/orc_type_promotion3.q.out b/ql/src/test/results/clientnegative/orc_type_promotion3.q.out new file mode 100644 index 0000000..b4630d6 --- /dev/null +++ b/ql/src/test/results/clientnegative/orc_type_promotion3.q.out @@ -0,0 +1,13 @@ +PREHOOK: query: create table src_orc (key tinyint, val string) stored as orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@src_orc +POSTHOOK: query: create table src_orc (key tinyint, val string) stored as orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@src_orc +PREHOOK: query: alter table src_orc change key key smallint +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: default@src_orc +PREHOOK: Output: default@src_orc +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Changing from type tinyint to smallint is not supported for column key. SerDe may be incompatible diff --git a/ql/src/test/results/clientpositive/dbtxnmgr_ddl1.q.out b/ql/src/test/results/clientpositive/dbtxnmgr_ddl1.q.out index b147c02..dadd9f1 100644 --- a/ql/src/test/results/clientpositive/dbtxnmgr_ddl1.q.out +++ b/ql/src/test/results/clientpositive/dbtxnmgr_ddl1.q.out @@ -169,15 +169,6 @@ POSTHOOK: query: alter table T5 set fileformat orc POSTHOOK: type: ALTERTABLE_FILEFORMAT POSTHOOK: Input: default@t5 POSTHOOK: Output: default@t5 -PREHOOK: query: alter table T4 partition (ds='tomorrow') set fileformat RCFILE -PREHOOK: type: ALTERPARTITION_FILEFORMAT -PREHOOK: Input: default@t4 -PREHOOK: Output: default@t4@ds=tomorrow -POSTHOOK: query: alter table T4 partition (ds='tomorrow') set fileformat RCFILE -POSTHOOK: type: ALTERPARTITION_FILEFORMAT -POSTHOOK: Input: default@t4 -POSTHOOK: Input: default@t4@ds=tomorrow -POSTHOOK: Output: default@t4@ds=tomorrow PREHOOK: query: create table T7 (a string, b int) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default diff --git 
a/ql/src/test/results/clientpositive/load_orc_part.q.out b/ql/src/test/results/clientpositive/load_orc_part.q.out index 16346cd..e6872cc 100644 --- a/ql/src/test/results/clientpositive/load_orc_part.q.out +++ b/ql/src/test/results/clientpositive/load_orc_part.q.out @@ -60,55 +60,3 @@ POSTHOOK: type: LOAD POSTHOOK: Output: default@orc_test@ds=10 Found 1 items #### A masked pattern was here #### -PREHOOK: query: alter table orc_test add partition(ds='11') -PREHOOK: type: ALTERTABLE_ADDPARTS -PREHOOK: Output: default@orc_test -POSTHOOK: query: alter table orc_test add partition(ds='11') -POSTHOOK: type: ALTERTABLE_ADDPARTS -POSTHOOK: Output: default@orc_test -POSTHOOK: Output: default@orc_test@ds=11 -PREHOOK: query: alter table orc_test partition(ds='11') set fileformat textfile -PREHOOK: type: ALTERPARTITION_FILEFORMAT -PREHOOK: Input: default@orc_test -PREHOOK: Output: default@orc_test@ds=11 -POSTHOOK: query: alter table orc_test partition(ds='11') set fileformat textfile -POSTHOOK: type: ALTERPARTITION_FILEFORMAT -POSTHOOK: Input: default@orc_test -POSTHOOK: Input: default@orc_test@ds=11 -POSTHOOK: Output: default@orc_test@ds=11 -PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table orc_test partition(ds='11') -PREHOOK: type: LOAD -#### A masked pattern was here #### -PREHOOK: Output: default@orc_test@ds=11 -POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table orc_test partition(ds='11') -POSTHOOK: type: LOAD -#### A masked pattern was here #### -POSTHOOK: Output: default@orc_test@ds=11 -Found 1 items -#### A masked pattern was here #### -PREHOOK: query: alter table orc_test add partition(ds='12') -PREHOOK: type: ALTERTABLE_ADDPARTS -PREHOOK: Output: default@orc_test -POSTHOOK: query: alter table orc_test add partition(ds='12') -POSTHOOK: type: ALTERTABLE_ADDPARTS -POSTHOOK: Output: default@orc_test -POSTHOOK: Output: default@orc_test@ds=12 -PREHOOK: query: alter table orc_test partition(ds='12') set fileformat 
textfile -PREHOOK: type: ALTERPARTITION_FILEFORMAT -PREHOOK: Input: default@orc_test -PREHOOK: Output: default@orc_test@ds=12 -POSTHOOK: query: alter table orc_test partition(ds='12') set fileformat textfile -POSTHOOK: type: ALTERPARTITION_FILEFORMAT -POSTHOOK: Input: default@orc_test -POSTHOOK: Input: default@orc_test@ds=12 -POSTHOOK: Output: default@orc_test@ds=12 -PREHOOK: query: load data local inpath '../../data/files/types/primitives' into table orc_test partition(ds='12') -PREHOOK: type: LOAD -#### A masked pattern was here #### -PREHOOK: Output: default@orc_test@ds=12 -POSTHOOK: query: load data local inpath '../../data/files/types/primitives' into table orc_test partition(ds='12') -POSTHOOK: type: LOAD -#### A masked pattern was here #### -POSTHOOK: Output: default@orc_test@ds=12 -Found 4 items -#### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/orc_analyze.q.out b/ql/src/test/results/clientpositive/orc_analyze.q.out index 1156feb..f1d8726 100644 --- a/ql/src/test/results/clientpositive/orc_analyze.q.out +++ b/ql/src/test/results/clientpositive/orc_analyze.q.out @@ -1369,66 +1369,17 @@ POSTHOOK: Lineage: orc_create_people PARTITION(state=Or).id SIMPLE [(orc_create_ POSTHOOK: Lineage: orc_create_people PARTITION(state=Or).last_name SIMPLE [(orc_create_people_staging)orc_create_people_staging.FieldSchema(name:last_name, type:string, comment:null), ] POSTHOOK: Lineage: orc_create_people PARTITION(state=Or).salary SIMPLE [(orc_create_people_staging)orc_create_people_staging.FieldSchema(name:salary, type:decimal(10,0), comment:null), ] POSTHOOK: Lineage: orc_create_people PARTITION(state=Or).start_date SIMPLE [(orc_create_people_staging)orc_create_people_staging.FieldSchema(name:start_date, type:timestamp, comment:null), ] -PREHOOK: query: -- set the table to text format -ALTER TABLE orc_create_people SET SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' -PREHOOK: type: ALTERTABLE_SERIALIZER -PREHOOK: Input: 
default@orc_create_people -PREHOOK: Output: default@orc_create_people -POSTHOOK: query: -- set the table to text format -ALTER TABLE orc_create_people SET SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' -POSTHOOK: type: ALTERTABLE_SERIALIZER -POSTHOOK: Input: default@orc_create_people -POSTHOOK: Output: default@orc_create_people -PREHOOK: query: ALTER TABLE orc_create_people SET FILEFORMAT TEXTFILE -PREHOOK: type: ALTERTABLE_FILEFORMAT -PREHOOK: Input: default@orc_create_people -PREHOOK: Output: default@orc_create_people -POSTHOOK: query: ALTER TABLE orc_create_people SET FILEFORMAT TEXTFILE -POSTHOOK: type: ALTERTABLE_FILEFORMAT -POSTHOOK: Input: default@orc_create_people -POSTHOOK: Output: default@orc_create_people -PREHOOK: query: -- load the text data into a new partition -LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE orc_create_people PARTITION(state="OH") -PREHOOK: type: LOAD -#### A masked pattern was here #### -PREHOOK: Output: default@orc_create_people -POSTHOOK: query: -- load the text data into a new partition -LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE orc_create_people PARTITION(state="OH") -POSTHOOK: type: LOAD -#### A masked pattern was here #### -POSTHOOK: Output: default@orc_create_people -POSTHOOK: Output: default@orc_create_people@state=OH -PREHOOK: query: -- set the table back to orc -ALTER TABLE orc_create_people SET SERDE 'org.apache.hadoop.hive.ql.io.orc.OrcSerde' -PREHOOK: type: ALTERTABLE_SERIALIZER -PREHOOK: Input: default@orc_create_people -PREHOOK: Output: default@orc_create_people -POSTHOOK: query: -- set the table back to orc -ALTER TABLE orc_create_people SET SERDE 'org.apache.hadoop.hive.ql.io.orc.OrcSerde' -POSTHOOK: type: ALTERTABLE_SERIALIZER -POSTHOOK: Input: default@orc_create_people -POSTHOOK: Output: default@orc_create_people -PREHOOK: query: ALTER TABLE orc_create_people SET FILEFORMAT ORC -PREHOOK: type: ALTERTABLE_FILEFORMAT -PREHOOK: Input: 
default@orc_create_people -PREHOOK: Output: default@orc_create_people -POSTHOOK: query: ALTER TABLE orc_create_people SET FILEFORMAT ORC -POSTHOOK: type: ALTERTABLE_FILEFORMAT -POSTHOOK: Input: default@orc_create_people -POSTHOOK: Output: default@orc_create_people PREHOOK: query: analyze table orc_create_people partition(state) compute statistics PREHOOK: type: QUERY PREHOOK: Input: default@orc_create_people PREHOOK: Output: default@orc_create_people PREHOOK: Output: default@orc_create_people@state=Ca -PREHOOK: Output: default@orc_create_people@state=OH PREHOOK: Output: default@orc_create_people@state=Or POSTHOOK: query: analyze table orc_create_people partition(state) compute statistics POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_create_people POSTHOOK: Output: default@orc_create_people POSTHOOK: Output: default@orc_create_people@state=Ca -POSTHOOK: Output: default@orc_create_people@state=OH POSTHOOK: Output: default@orc_create_people@state=Or PREHOOK: query: desc formatted orc_create_people partition(state="Ca") PREHOOK: type: DESCTABLE @@ -1473,62 +1424,17 @@ Bucket Columns: [] Sort Columns: [] Storage Desc Params: serialization.format 1 -PREHOOK: query: desc formatted orc_create_people partition(state="OH") -PREHOOK: type: DESCTABLE -PREHOOK: Input: default@orc_create_people -POSTHOOK: query: desc formatted orc_create_people partition(state="OH") -POSTHOOK: type: DESCTABLE -POSTHOOK: Input: default@orc_create_people -# col_name data_type comment - -id int -first_name string -last_name string -address string -salary decimal(10,0) -start_date timestamp - -# Partition Information -# col_name data_type comment - -state string - -# Detailed Partition Information -Partition Value: [OH] -Database: default -Table: orc_create_people -#### A masked pattern was here #### -Partition Parameters: - COLUMN_STATS_ACCURATE false - numFiles 1 - numRows -1 - rawDataSize -1 - totalSize 5812 -#### A masked pattern was here #### - -# Storage Information -SerDe Library: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -InputFormat: org.apache.hadoop.mapred.TextInputFormat -OutputFormat: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat -Compressed: No -Num Buckets: -1 -Bucket Columns: [] -Sort Columns: [] -Storage Desc Params: - serialization.format 1 PREHOOK: query: analyze table orc_create_people partition(state) compute statistics partialscan PREHOOK: type: QUERY PREHOOK: Input: default@orc_create_people PREHOOK: Output: default@orc_create_people PREHOOK: Output: default@orc_create_people@state=Ca -PREHOOK: Output: default@orc_create_people@state=OH PREHOOK: Output: default@orc_create_people@state=Or POSTHOOK: query: analyze table orc_create_people partition(state) compute statistics partialscan POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_create_people POSTHOOK: Output: default@orc_create_people POSTHOOK: Output: default@orc_create_people@state=Ca -POSTHOOK: Output: default@orc_create_people@state=OH POSTHOOK: Output: default@orc_create_people@state=Or PREHOOK: query: desc formatted orc_create_people partition(state="Ca") PREHOOK: type: DESCTABLE @@ -1573,62 +1479,17 @@ Bucket Columns: [] Sort Columns: [] Storage Desc Params: serialization.format 1 -PREHOOK: query: desc formatted orc_create_people partition(state="OH") -PREHOOK: type: DESCTABLE -PREHOOK: Input: default@orc_create_people -POSTHOOK: query: desc formatted orc_create_people partition(state="OH") -POSTHOOK: type: DESCTABLE -POSTHOOK: Input: default@orc_create_people -# col_name data_type comment - -id int -first_name string -last_name string -address string -salary decimal(10,0) -start_date timestamp - -# Partition Information -# col_name data_type comment - -state string - -# Detailed Partition Information -Partition Value: [OH] -Database: default -Table: orc_create_people -#### A masked pattern was here #### -Partition Parameters: - COLUMN_STATS_ACCURATE false - numFiles 1 - numRows -1 - rawDataSize -1 - totalSize 5812 -#### A masked pattern was here 
#### - -# Storage Information -SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -InputFormat: org.apache.hadoop.mapred.TextInputFormat -OutputFormat: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat -Compressed: No -Num Buckets: -1 -Bucket Columns: [] -Sort Columns: [] -Storage Desc Params: - serialization.format 1 PREHOOK: query: analyze table orc_create_people partition(state) compute statistics noscan PREHOOK: type: QUERY PREHOOK: Input: default@orc_create_people PREHOOK: Output: default@orc_create_people PREHOOK: Output: default@orc_create_people@state=Ca -PREHOOK: Output: default@orc_create_people@state=OH PREHOOK: Output: default@orc_create_people@state=Or POSTHOOK: query: analyze table orc_create_people partition(state) compute statistics noscan POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_create_people POSTHOOK: Output: default@orc_create_people POSTHOOK: Output: default@orc_create_people@state=Ca -POSTHOOK: Output: default@orc_create_people@state=OH POSTHOOK: Output: default@orc_create_people@state=Or PREHOOK: query: desc formatted orc_create_people partition(state="Ca") PREHOOK: type: DESCTABLE @@ -1673,49 +1534,6 @@ Bucket Columns: [] Sort Columns: [] Storage Desc Params: serialization.format 1 -PREHOOK: query: desc formatted orc_create_people partition(state="OH") -PREHOOK: type: DESCTABLE -PREHOOK: Input: default@orc_create_people -POSTHOOK: query: desc formatted orc_create_people partition(state="OH") -POSTHOOK: type: DESCTABLE -POSTHOOK: Input: default@orc_create_people -# col_name data_type comment - -id int -first_name string -last_name string -address string -salary decimal(10,0) -start_date timestamp - -# Partition Information -# col_name data_type comment - -state string - -# Detailed Partition Information -Partition Value: [OH] -Database: default -Table: orc_create_people -#### A masked pattern was here #### -Partition Parameters: - COLUMN_STATS_ACCURATE false - numFiles 1 - numRows -1 - rawDataSize -1 - totalSize 
5812 -#### A masked pattern was here #### - -# Storage Information -SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -InputFormat: org.apache.hadoop.mapred.TextInputFormat -OutputFormat: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat -Compressed: No -Num Buckets: -1 -Bucket Columns: [] -Sort Columns: [] -Storage Desc Params: - serialization.format 1 PREHOOK: query: drop table orc_create_people PREHOOK: type: DROPTABLE PREHOOK: Input: default@orc_create_people diff --git a/ql/src/test/results/clientpositive/orc_int_type_promotion.q.out b/ql/src/test/results/clientpositive/orc_int_type_promotion.q.out index 03e2f7f..75769ec 100644 --- a/ql/src/test/results/clientpositive/orc_int_type_promotion.q.out +++ b/ql/src/test/results/clientpositive/orc_int_type_promotion.q.out @@ -172,40 +172,6 @@ POSTHOOK: Input: default@alltypes_orc #### A masked pattern was here #### true 10 100 1000 10000 4.0 20.0 4.222 1969-12-31 15:59:58.174 1970-01-01 string hello hello {"k1":"v1","k2":"v2"} [100,200] {"c1":null,"c2":" \"foo\"}"} false 20 200 2000 20000 8.0 40.0 2.222 1970-12-31 15:59:58.174 1971-01-01 abcd world world {"k3":"v3","k4":"v4"} [200,300] {"c1":null,"c2":" \"bar\"}"} -PREHOOK: query: alter table alltypes_orc change l l array -PREHOOK: type: ALTERTABLE_RENAMECOL -PREHOOK: Input: default@alltypes_orc -PREHOOK: Output: default@alltypes_orc -POSTHOOK: query: alter table alltypes_orc change l l array -POSTHOOK: type: ALTERTABLE_RENAMECOL -POSTHOOK: Input: default@alltypes_orc -POSTHOOK: Output: default@alltypes_orc -PREHOOK: query: select * from alltypes_orc -PREHOOK: type: QUERY -PREHOOK: Input: default@alltypes_orc -#### A masked pattern was here #### -POSTHOOK: query: select * from alltypes_orc -POSTHOOK: type: QUERY -POSTHOOK: Input: default@alltypes_orc -#### A masked pattern was here #### -true 10 100 1000 10000 4.0 20.0 4.222 1969-12-31 15:59:58.174 1970-01-01 string hello hello {"k1":"v1","k2":"v2"} [100,200] {"c1":null,"c2":" \"foo\"}"} -false 
20 200 2000 20000 8.0 40.0 2.222 1970-12-31 15:59:58.174 1971-01-01 abcd world world {"k3":"v3","k4":"v4"} [200,300] {"c1":null,"c2":" \"bar\"}"} -PREHOOK: query: alter table alltypes_orc change si si smallint -PREHOOK: type: ALTERTABLE_RENAMECOL -PREHOOK: Input: default@alltypes_orc -PREHOOK: Output: default@alltypes_orc -POSTHOOK: query: alter table alltypes_orc change si si smallint -POSTHOOK: type: ALTERTABLE_RENAMECOL -POSTHOOK: Input: default@alltypes_orc -POSTHOOK: Output: default@alltypes_orc -PREHOOK: query: alter table alltypes_orc change i i int -PREHOOK: type: ALTERTABLE_RENAMECOL -PREHOOK: Input: default@alltypes_orc -PREHOOK: Output: default@alltypes_orc -POSTHOOK: query: alter table alltypes_orc change i i int -POSTHOOK: type: ALTERTABLE_RENAMECOL -POSTHOOK: Input: default@alltypes_orc -POSTHOOK: Output: default@alltypes_orc PREHOOK: query: explain select ti, si, i, bi from alltypes_orc PREHOOK: type: QUERY POSTHOOK: query: explain select ti, si, i, bi from alltypes_orc @@ -220,14 +186,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypes_orc - Statistics: Num rows: 88 Data size: 1766 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 63 Data size: 1766 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: ti (type: tinyint), si (type: smallint), i (type: int), bi (type: bigint) + expressions: ti (type: tinyint), si (type: bigint), i (type: bigint), bi (type: bigint) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 88 Data size: 1766 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 63 Data size: 1766 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 88 Data size: 1766 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 63 Data size: 1766 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -250,50 +216,6 @@ POSTHOOK: Input: default@alltypes_orc #### A masked pattern was here #### 10 100 1000 10000 20 200 2000 20000 -PREHOOK: query: alter table alltypes_orc change si si int -PREHOOK: type: ALTERTABLE_RENAMECOL -PREHOOK: Input: default@alltypes_orc -PREHOOK: Output: default@alltypes_orc -POSTHOOK: query: alter table alltypes_orc change si si int -POSTHOOK: type: ALTERTABLE_RENAMECOL -POSTHOOK: Input: default@alltypes_orc -POSTHOOK: Output: default@alltypes_orc -PREHOOK: query: select ti, si, i, bi from alltypes_orc -PREHOOK: type: QUERY -PREHOOK: Input: default@alltypes_orc -#### A masked pattern was here #### -POSTHOOK: query: select ti, si, i, bi from alltypes_orc -POSTHOOK: type: QUERY -POSTHOOK: Input: default@alltypes_orc -#### A masked pattern was here #### -10 100 1000 10000 -20 200 2000 20000 -PREHOOK: query: alter table alltypes_orc change si si bigint -PREHOOK: type: ALTERTABLE_RENAMECOL -PREHOOK: Input: default@alltypes_orc -PREHOOK: Output: default@alltypes_orc -POSTHOOK: query: alter table alltypes_orc change si si bigint -POSTHOOK: type: ALTERTABLE_RENAMECOL -POSTHOOK: Input: default@alltypes_orc -POSTHOOK: Output: default@alltypes_orc -PREHOOK: query: alter table alltypes_orc change i i bigint -PREHOOK: type: ALTERTABLE_RENAMECOL -PREHOOK: Input: default@alltypes_orc -PREHOOK: Output: default@alltypes_orc -POSTHOOK: query: alter table alltypes_orc change i i bigint -POSTHOOK: type: ALTERTABLE_RENAMECOL -POSTHOOK: Input: default@alltypes_orc -POSTHOOK: Output: default@alltypes_orc -PREHOOK: query: select ti, si, i, bi from alltypes_orc -PREHOOK: type: QUERY -PREHOOK: Input: default@alltypes_orc -#### A masked pattern was here #### -POSTHOOK: query: select ti, si, i, bi from alltypes_orc -POSTHOOK: type: QUERY -POSTHOOK: Input: default@alltypes_orc -#### A masked pattern was here #### -10 100 1000 10000 -20 200 2000 20000 PREHOOK: query: create table src_part_orc (key int, value 
string) partitioned by (ds string) stored as orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default diff --git a/ql/src/test/results/clientpositive/orc_schema_evolution.q.out b/ql/src/test/results/clientpositive/orc_schema_evolution.q.out new file mode 100644 index 0000000..9707b14 --- /dev/null +++ b/ql/src/test/results/clientpositive/orc_schema_evolution.q.out @@ -0,0 +1,211 @@ +PREHOOK: query: create table src_orc (key smallint, val string) stored as orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@src_orc +POSTHOOK: query: create table src_orc (key smallint, val string) stored as orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@src_orc +PREHOOK: query: create table src_orc2 (key smallint, val string) stored as orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@src_orc2 +POSTHOOK: query: create table src_orc2 (key smallint, val string) stored as orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@src_orc2 +PREHOOK: query: -- integer type widening +insert overwrite table src_orc select * from src +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@src_orc +POSTHOOK: query: -- integer type widening +insert overwrite table src_orc select * from src +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@src_orc +POSTHOOK: Lineage: src_orc.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_orc.val SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: select sum(hash(*)) from src_orc +PREHOOK: type: QUERY +PREHOOK: Input: default@src_orc +#### A masked pattern was here #### +POSTHOOK: query: select sum(hash(*)) from src_orc +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src_orc +#### A masked pattern was here #### +36214430891 +PREHOOK: query: 
alter table src_orc change key key smallint +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: default@src_orc +PREHOOK: Output: default@src_orc +POSTHOOK: query: alter table src_orc change key key smallint +POSTHOOK: type: ALTERTABLE_RENAMECOL +POSTHOOK: Input: default@src_orc +POSTHOOK: Output: default@src_orc +PREHOOK: query: select sum(hash(*)) from src_orc +PREHOOK: type: QUERY +PREHOOK: Input: default@src_orc +#### A masked pattern was here #### +POSTHOOK: query: select sum(hash(*)) from src_orc +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src_orc +#### A masked pattern was here #### +36214430891 +PREHOOK: query: alter table src_orc change key key int +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: default@src_orc +PREHOOK: Output: default@src_orc +POSTHOOK: query: alter table src_orc change key key int +POSTHOOK: type: ALTERTABLE_RENAMECOL +POSTHOOK: Input: default@src_orc +POSTHOOK: Output: default@src_orc +PREHOOK: query: select sum(hash(*)) from src_orc +PREHOOK: type: QUERY +PREHOOK: Input: default@src_orc +#### A masked pattern was here #### +POSTHOOK: query: select sum(hash(*)) from src_orc +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src_orc +#### A masked pattern was here #### +36214430891 +PREHOOK: query: alter table src_orc change key key bigint +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: default@src_orc +PREHOOK: Output: default@src_orc +POSTHOOK: query: alter table src_orc change key key bigint +POSTHOOK: type: ALTERTABLE_RENAMECOL +POSTHOOK: Input: default@src_orc +POSTHOOK: Output: default@src_orc +PREHOOK: query: select sum(hash(*)) from src_orc +PREHOOK: type: QUERY +PREHOOK: Input: default@src_orc +#### A masked pattern was here #### +POSTHOOK: query: select sum(hash(*)) from src_orc +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src_orc +#### A masked pattern was here #### +36214430891 +PREHOOK: query: -- replace columns for adding columns and type widening +insert overwrite table src_orc2 select * from src 
+PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@src_orc2 +POSTHOOK: query: -- replace columns for adding columns and type widening +insert overwrite table src_orc2 select * from src +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@src_orc2 +POSTHOOK: Lineage: src_orc2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_orc2.val SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: select sum(hash(*)) from src_orc2 +PREHOOK: type: QUERY +PREHOOK: Input: default@src_orc2 +#### A masked pattern was here #### +POSTHOOK: query: select sum(hash(*)) from src_orc2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src_orc2 +#### A masked pattern was here #### +36214430891 +PREHOOK: query: alter table src_orc2 replace columns (k smallint, v string) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@src_orc2 +PREHOOK: Output: default@src_orc2 +POSTHOOK: query: alter table src_orc2 replace columns (k smallint, v string) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@src_orc2 +POSTHOOK: Output: default@src_orc2 +PREHOOK: query: select sum(hash(*)) from src_orc2 +PREHOOK: type: QUERY +PREHOOK: Input: default@src_orc2 +#### A masked pattern was here #### +POSTHOOK: query: select sum(hash(*)) from src_orc2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src_orc2 +#### A masked pattern was here #### +36214430891 +PREHOOK: query: alter table src_orc2 replace columns (k int, v string) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@src_orc2 +PREHOOK: Output: default@src_orc2 +POSTHOOK: query: alter table src_orc2 replace columns (k int, v string) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@src_orc2 +POSTHOOK: Output: default@src_orc2 +PREHOOK: query: select sum(hash(*)) from src_orc2 +PREHOOK: type: QUERY +PREHOOK: Input: default@src_orc2 +#### A masked pattern was 
here #### +POSTHOOK: query: select sum(hash(*)) from src_orc2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src_orc2 +#### A masked pattern was here #### +36214430891 +PREHOOK: query: alter table src_orc2 replace columns (k bigint, v string) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@src_orc2 +PREHOOK: Output: default@src_orc2 +POSTHOOK: query: alter table src_orc2 replace columns (k bigint, v string) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@src_orc2 +POSTHOOK: Output: default@src_orc2 +PREHOOK: query: select sum(hash(*)) from src_orc2 +PREHOOK: type: QUERY +PREHOOK: Input: default@src_orc2 +#### A masked pattern was here #### +POSTHOOK: query: select sum(hash(*)) from src_orc2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src_orc2 +#### A masked pattern was here #### +36214430891 +PREHOOK: query: alter table src_orc2 replace columns (k bigint, v string, z int) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@src_orc2 +PREHOOK: Output: default@src_orc2 +POSTHOOK: query: alter table src_orc2 replace columns (k bigint, v string, z int) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@src_orc2 +POSTHOOK: Output: default@src_orc2 +PREHOOK: query: select sum(hash(*)) from src_orc2 +PREHOOK: type: QUERY +PREHOOK: Input: default@src_orc2 +#### A masked pattern was here #### +POSTHOOK: query: select sum(hash(*)) from src_orc2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src_orc2 +#### A masked pattern was here #### +-586749626187 +PREHOOK: query: alter table src_orc2 replace columns (k bigint, v string, z bigint) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@src_orc2 +PREHOOK: Output: default@src_orc2 +POSTHOOK: query: alter table src_orc2 replace columns (k bigint, v string, z bigint) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@src_orc2 +POSTHOOK: Output: default@src_orc2 +PREHOOK: query: select sum(hash(*)) from src_orc2 +PREHOOK: type: QUERY 
+PREHOOK: Input: default@src_orc2 +#### A masked pattern was here #### +POSTHOOK: query: select sum(hash(*)) from src_orc2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src_orc2 +#### A masked pattern was here #### +-586749626187 +PREHOOK: query: alter table src_orc2 replace columns (k bigint, v string, z bigint, y float) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@src_orc2 +PREHOOK: Output: default@src_orc2 +POSTHOOK: query: alter table src_orc2 replace columns (k bigint, v string, z bigint, y float) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@src_orc2 +POSTHOOK: Output: default@src_orc2 +PREHOOK: query: select sum(hash(*)) from src_orc2 +PREHOOK: type: QUERY +PREHOOK: Input: default@src_orc2 +#### A masked pattern was here #### +POSTHOOK: query: select sum(hash(*)) from src_orc2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src_orc2 +#### A masked pattern was here #### +-700131582485 diff --git a/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out b/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out index 6de88bf..d6b7208 100644 --- a/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out @@ -2964,22 +2964,38 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@test_count #### A masked pattern was here #### 0 -PREHOOK: query: create table alltypesnull like alltypesorc +PREHOOK: query: CREATE TABLE alltypesnull( + ctinyint TINYINT, + csmallint SMALLINT, + cint INT, + cbigint BIGINT, + cfloat FLOAT, + cdouble DOUBLE, + cstring1 STRING, + cstring2 STRING, + ctimestamp1 TIMESTAMP, + ctimestamp2 TIMESTAMP, + cboolean1 BOOLEAN, + cboolean2 BOOLEAN) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@alltypesnull -POSTHOOK: query: create table alltypesnull like alltypesorc +POSTHOOK: query: CREATE TABLE alltypesnull( + ctinyint TINYINT, + 
csmallint SMALLINT, + cint INT, + cbigint BIGINT, + cfloat FLOAT, + cdouble DOUBLE, + cstring1 STRING, + cstring2 STRING, + ctimestamp1 TIMESTAMP, + ctimestamp2 TIMESTAMP, + cboolean1 BOOLEAN, + cboolean2 BOOLEAN) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@alltypesnull -PREHOOK: query: alter table alltypesnull set fileformat textfile -PREHOOK: type: ALTERTABLE_FILEFORMAT -PREHOOK: Input: default@alltypesnull -PREHOOK: Output: default@alltypesnull -POSTHOOK: query: alter table alltypesnull set fileformat textfile -POSTHOOK: type: ALTERTABLE_FILEFORMAT -POSTHOOK: Input: default@alltypesnull -POSTHOOK: Output: default@alltypesnull PREHOOK: query: insert into table alltypesnull select null, null, null, null, null, null, null, null, null, null, null, null from alltypesorc PREHOOK: type: QUERY PREHOOK: Input: default@alltypesorc diff --git a/ql/src/test/results/clientpositive/tez/orc_analyze.q.out b/ql/src/test/results/clientpositive/tez/orc_analyze.q.out index 1156feb..f1d8726 100644 --- a/ql/src/test/results/clientpositive/tez/orc_analyze.q.out +++ b/ql/src/test/results/clientpositive/tez/orc_analyze.q.out @@ -1369,66 +1369,17 @@ POSTHOOK: Lineage: orc_create_people PARTITION(state=Or).id SIMPLE [(orc_create_ POSTHOOK: Lineage: orc_create_people PARTITION(state=Or).last_name SIMPLE [(orc_create_people_staging)orc_create_people_staging.FieldSchema(name:last_name, type:string, comment:null), ] POSTHOOK: Lineage: orc_create_people PARTITION(state=Or).salary SIMPLE [(orc_create_people_staging)orc_create_people_staging.FieldSchema(name:salary, type:decimal(10,0), comment:null), ] POSTHOOK: Lineage: orc_create_people PARTITION(state=Or).start_date SIMPLE [(orc_create_people_staging)orc_create_people_staging.FieldSchema(name:start_date, type:timestamp, comment:null), ] -PREHOOK: query: -- set the table to text format -ALTER TABLE orc_create_people SET SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' -PREHOOK: type: 
ALTERTABLE_SERIALIZER -PREHOOK: Input: default@orc_create_people -PREHOOK: Output: default@orc_create_people -POSTHOOK: query: -- set the table to text format -ALTER TABLE orc_create_people SET SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' -POSTHOOK: type: ALTERTABLE_SERIALIZER -POSTHOOK: Input: default@orc_create_people -POSTHOOK: Output: default@orc_create_people -PREHOOK: query: ALTER TABLE orc_create_people SET FILEFORMAT TEXTFILE -PREHOOK: type: ALTERTABLE_FILEFORMAT -PREHOOK: Input: default@orc_create_people -PREHOOK: Output: default@orc_create_people -POSTHOOK: query: ALTER TABLE orc_create_people SET FILEFORMAT TEXTFILE -POSTHOOK: type: ALTERTABLE_FILEFORMAT -POSTHOOK: Input: default@orc_create_people -POSTHOOK: Output: default@orc_create_people -PREHOOK: query: -- load the text data into a new partition -LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE orc_create_people PARTITION(state="OH") -PREHOOK: type: LOAD -#### A masked pattern was here #### -PREHOOK: Output: default@orc_create_people -POSTHOOK: query: -- load the text data into a new partition -LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE orc_create_people PARTITION(state="OH") -POSTHOOK: type: LOAD -#### A masked pattern was here #### -POSTHOOK: Output: default@orc_create_people -POSTHOOK: Output: default@orc_create_people@state=OH -PREHOOK: query: -- set the table back to orc -ALTER TABLE orc_create_people SET SERDE 'org.apache.hadoop.hive.ql.io.orc.OrcSerde' -PREHOOK: type: ALTERTABLE_SERIALIZER -PREHOOK: Input: default@orc_create_people -PREHOOK: Output: default@orc_create_people -POSTHOOK: query: -- set the table back to orc -ALTER TABLE orc_create_people SET SERDE 'org.apache.hadoop.hive.ql.io.orc.OrcSerde' -POSTHOOK: type: ALTERTABLE_SERIALIZER -POSTHOOK: Input: default@orc_create_people -POSTHOOK: Output: default@orc_create_people -PREHOOK: query: ALTER TABLE orc_create_people SET FILEFORMAT ORC -PREHOOK: type: 
ALTERTABLE_FILEFORMAT -PREHOOK: Input: default@orc_create_people -PREHOOK: Output: default@orc_create_people -POSTHOOK: query: ALTER TABLE orc_create_people SET FILEFORMAT ORC -POSTHOOK: type: ALTERTABLE_FILEFORMAT -POSTHOOK: Input: default@orc_create_people -POSTHOOK: Output: default@orc_create_people PREHOOK: query: analyze table orc_create_people partition(state) compute statistics PREHOOK: type: QUERY PREHOOK: Input: default@orc_create_people PREHOOK: Output: default@orc_create_people PREHOOK: Output: default@orc_create_people@state=Ca -PREHOOK: Output: default@orc_create_people@state=OH PREHOOK: Output: default@orc_create_people@state=Or POSTHOOK: query: analyze table orc_create_people partition(state) compute statistics POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_create_people POSTHOOK: Output: default@orc_create_people POSTHOOK: Output: default@orc_create_people@state=Ca -POSTHOOK: Output: default@orc_create_people@state=OH POSTHOOK: Output: default@orc_create_people@state=Or PREHOOK: query: desc formatted orc_create_people partition(state="Ca") PREHOOK: type: DESCTABLE @@ -1473,62 +1424,17 @@ Bucket Columns: [] Sort Columns: [] Storage Desc Params: serialization.format 1 -PREHOOK: query: desc formatted orc_create_people partition(state="OH") -PREHOOK: type: DESCTABLE -PREHOOK: Input: default@orc_create_people -POSTHOOK: query: desc formatted orc_create_people partition(state="OH") -POSTHOOK: type: DESCTABLE -POSTHOOK: Input: default@orc_create_people -# col_name data_type comment - -id int -first_name string -last_name string -address string -salary decimal(10,0) -start_date timestamp - -# Partition Information -# col_name data_type comment - -state string - -# Detailed Partition Information -Partition Value: [OH] -Database: default -Table: orc_create_people -#### A masked pattern was here #### -Partition Parameters: - COLUMN_STATS_ACCURATE false - numFiles 1 - numRows -1 - rawDataSize -1 - totalSize 5812 -#### A masked pattern was here #### - -# 
Storage Information -SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -InputFormat: org.apache.hadoop.mapred.TextInputFormat -OutputFormat: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat -Compressed: No -Num Buckets: -1 -Bucket Columns: [] -Sort Columns: [] -Storage Desc Params: - serialization.format 1 PREHOOK: query: analyze table orc_create_people partition(state) compute statistics partialscan PREHOOK: type: QUERY PREHOOK: Input: default@orc_create_people PREHOOK: Output: default@orc_create_people PREHOOK: Output: default@orc_create_people@state=Ca -PREHOOK: Output: default@orc_create_people@state=OH PREHOOK: Output: default@orc_create_people@state=Or POSTHOOK: query: analyze table orc_create_people partition(state) compute statistics partialscan POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_create_people POSTHOOK: Output: default@orc_create_people POSTHOOK: Output: default@orc_create_people@state=Ca -POSTHOOK: Output: default@orc_create_people@state=OH POSTHOOK: Output: default@orc_create_people@state=Or PREHOOK: query: desc formatted orc_create_people partition(state="Ca") PREHOOK: type: DESCTABLE @@ -1573,62 +1479,17 @@ Bucket Columns: [] Sort Columns: [] Storage Desc Params: serialization.format 1 -PREHOOK: query: desc formatted orc_create_people partition(state="OH") -PREHOOK: type: DESCTABLE -PREHOOK: Input: default@orc_create_people -POSTHOOK: query: desc formatted orc_create_people partition(state="OH") -POSTHOOK: type: DESCTABLE -POSTHOOK: Input: default@orc_create_people -# col_name data_type comment - -id int -first_name string -last_name string -address string -salary decimal(10,0) -start_date timestamp - -# Partition Information -# col_name data_type comment - -state string - -# Detailed Partition Information -Partition Value: [OH] -Database: default -Table: orc_create_people -#### A masked pattern was here #### -Partition Parameters: - COLUMN_STATS_ACCURATE false - numFiles 1 - numRows -1 - rawDataSize -1 - totalSize 
5812 -#### A masked pattern was here #### - -# Storage Information -SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -InputFormat: org.apache.hadoop.mapred.TextInputFormat -OutputFormat: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat -Compressed: No -Num Buckets: -1 -Bucket Columns: [] -Sort Columns: [] -Storage Desc Params: - serialization.format 1 PREHOOK: query: analyze table orc_create_people partition(state) compute statistics noscan PREHOOK: type: QUERY PREHOOK: Input: default@orc_create_people PREHOOK: Output: default@orc_create_people PREHOOK: Output: default@orc_create_people@state=Ca -PREHOOK: Output: default@orc_create_people@state=OH PREHOOK: Output: default@orc_create_people@state=Or POSTHOOK: query: analyze table orc_create_people partition(state) compute statistics noscan POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_create_people POSTHOOK: Output: default@orc_create_people POSTHOOK: Output: default@orc_create_people@state=Ca -POSTHOOK: Output: default@orc_create_people@state=OH POSTHOOK: Output: default@orc_create_people@state=Or PREHOOK: query: desc formatted orc_create_people partition(state="Ca") PREHOOK: type: DESCTABLE @@ -1673,49 +1534,6 @@ Bucket Columns: [] Sort Columns: [] Storage Desc Params: serialization.format 1 -PREHOOK: query: desc formatted orc_create_people partition(state="OH") -PREHOOK: type: DESCTABLE -PREHOOK: Input: default@orc_create_people -POSTHOOK: query: desc formatted orc_create_people partition(state="OH") -POSTHOOK: type: DESCTABLE -POSTHOOK: Input: default@orc_create_people -# col_name data_type comment - -id int -first_name string -last_name string -address string -salary decimal(10,0) -start_date timestamp - -# Partition Information -# col_name data_type comment - -state string - -# Detailed Partition Information -Partition Value: [OH] -Database: default -Table: orc_create_people -#### A masked pattern was here #### -Partition Parameters: - COLUMN_STATS_ACCURATE false - numFiles 1 - 
numRows -1 - rawDataSize -1 - totalSize 5812 -#### A masked pattern was here #### - -# Storage Information -SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -InputFormat: org.apache.hadoop.mapred.TextInputFormat -OutputFormat: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat -Compressed: No -Num Buckets: -1 -Bucket Columns: [] -Sort Columns: [] -Storage Desc Params: - serialization.format 1 PREHOOK: query: drop table orc_create_people PREHOOK: type: DROPTABLE PREHOOK: Input: default@orc_create_people diff --git a/ql/src/test/results/clientpositive/tez/vectorization_short_regress.q.out b/ql/src/test/results/clientpositive/tez/vectorization_short_regress.q.out index 4925e65..f34761c 100644 --- a/ql/src/test/results/clientpositive/tez/vectorization_short_regress.q.out +++ b/ql/src/test/results/clientpositive/tez/vectorization_short_regress.q.out @@ -2964,22 +2964,38 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@test_count #### A masked pattern was here #### 0 -PREHOOK: query: create table alltypesnull like alltypesorc +PREHOOK: query: CREATE TABLE alltypesnull( + ctinyint TINYINT, + csmallint SMALLINT, + cint INT, + cbigint BIGINT, + cfloat FLOAT, + cdouble DOUBLE, + cstring1 STRING, + cstring2 STRING, + ctimestamp1 TIMESTAMP, + ctimestamp2 TIMESTAMP, + cboolean1 BOOLEAN, + cboolean2 BOOLEAN) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@alltypesnull -POSTHOOK: query: create table alltypesnull like alltypesorc +POSTHOOK: query: CREATE TABLE alltypesnull( + ctinyint TINYINT, + csmallint SMALLINT, + cint INT, + cbigint BIGINT, + cfloat FLOAT, + cdouble DOUBLE, + cstring1 STRING, + cstring2 STRING, + ctimestamp1 TIMESTAMP, + ctimestamp2 TIMESTAMP, + cboolean1 BOOLEAN, + cboolean2 BOOLEAN) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@alltypesnull -PREHOOK: query: alter table alltypesnull set fileformat textfile -PREHOOK: type: ALTERTABLE_FILEFORMAT -PREHOOK: 
Input: default@alltypesnull -PREHOOK: Output: default@alltypesnull -POSTHOOK: query: alter table alltypesnull set fileformat textfile -POSTHOOK: type: ALTERTABLE_FILEFORMAT -POSTHOOK: Input: default@alltypesnull -POSTHOOK: Output: default@alltypesnull PREHOOK: query: insert into table alltypesnull select null, null, null, null, null, null, null, null, null, null, null, null from alltypesorc PREHOOK: type: QUERY PREHOOK: Input: default@alltypesorc diff --git a/ql/src/test/results/clientpositive/vectorization_short_regress.q.out b/ql/src/test/results/clientpositive/vectorization_short_regress.q.out index 73034dd..78b5d07 100644 --- a/ql/src/test/results/clientpositive/vectorization_short_regress.q.out +++ b/ql/src/test/results/clientpositive/vectorization_short_regress.q.out @@ -2910,22 +2910,38 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@test_count #### A masked pattern was here #### 0 -PREHOOK: query: create table alltypesnull like alltypesorc +PREHOOK: query: CREATE TABLE alltypesnull( + ctinyint TINYINT, + csmallint SMALLINT, + cint INT, + cbigint BIGINT, + cfloat FLOAT, + cdouble DOUBLE, + cstring1 STRING, + cstring2 STRING, + ctimestamp1 TIMESTAMP, + ctimestamp2 TIMESTAMP, + cboolean1 BOOLEAN, + cboolean2 BOOLEAN) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@alltypesnull -POSTHOOK: query: create table alltypesnull like alltypesorc +POSTHOOK: query: CREATE TABLE alltypesnull( + ctinyint TINYINT, + csmallint SMALLINT, + cint INT, + cbigint BIGINT, + cfloat FLOAT, + cdouble DOUBLE, + cstring1 STRING, + cstring2 STRING, + ctimestamp1 TIMESTAMP, + ctimestamp2 TIMESTAMP, + cboolean1 BOOLEAN, + cboolean2 BOOLEAN) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@alltypesnull -PREHOOK: query: alter table alltypesnull set fileformat textfile -PREHOOK: type: ALTERTABLE_FILEFORMAT -PREHOOK: Input: default@alltypesnull -PREHOOK: Output: default@alltypesnull -POSTHOOK: query: alter table 
alltypesnull set fileformat textfile -POSTHOOK: type: ALTERTABLE_FILEFORMAT -POSTHOOK: Input: default@alltypesnull -POSTHOOK: Output: default@alltypesnull PREHOOK: query: insert into table alltypesnull select null, null, null, null, null, null, null, null, null, null, null, null from alltypesorc PREHOOK: type: QUERY PREHOOK: Input: default@alltypesorc