diff --git itests/src/test/resources/testconfiguration.properties itests/src/test/resources/testconfiguration.properties
index a3ddbda..a20852a 100644
--- itests/src/test/resources/testconfiguration.properties
+++ itests/src/test/resources/testconfiguration.properties
@@ -750,6 +750,7 @@ minillaplocal.query.files=\
   vector_join_filters.q,\
   vector_leftsemi_mapjoin.q,\
   vector_like_2.q,\
+  vector_llap_io_data_conversion.q,\
   vector_llap_text_1.q,\
   vector_mapjoin_reduce.q,\
   vector_null_map.q,\
diff --git llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapRecordReader.java llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapRecordReader.java
index 201c097..15ea7e3 100644
--- llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapRecordReader.java
+++ llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapRecordReader.java
@@ -291,6 +291,11 @@ public void start() {
 
   private boolean checkOrcSchemaEvolution() {
     SchemaEvolution evolution = rp.getSchemaEvolution();
+    if (evolution.hasConversion()) {
+
+      // We do not support data type conversion when reading encoded ORC data.
+      return false;
+    }
     // TODO: should this just use physical IDs?
     for (int i = 0; i < includes.getReaderLogicalColumnIds().size(); ++i) {
       int projectedColId = includes.getReaderLogicalColumnIds().get(i);
diff --git ql/src/test/queries/clientpositive/vector_llap_io_data_conversion.q ql/src/test/queries/clientpositive/vector_llap_io_data_conversion.q
new file mode 100644
index 0000000..f40c4b9
--- /dev/null
+++ ql/src/test/queries/clientpositive/vector_llap_io_data_conversion.q
@@ -0,0 +1,19 @@
+--! qt:dataset:alltypesorc
+set hive.explain.user=false;
+SET hive.vectorized.execution.enabled=true;
+
+set hive.llap.io.enabled=true;
+set hive.llap.io.encode.enabled=true;
+
+create table varchar_single_partition(vt varchar(10), vsi varchar(10), vi varchar(20), vb varchar(30), vf varchar(20),vd varchar(20),vs varchar(50))
+  partitioned by(s varchar(50)) stored as orc;
+insert into table varchar_single_partition partition(s='positive') select ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1 from alltypesorc where cint>0 limit 10;
+insert into table varchar_single_partition partition(s='negative') select ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1 from alltypesorc where cint<0 limit 10;
+alter table varchar_single_partition change column vs vs varchar(10);
+
+create table varchar_ctas_1 stored as orc as select vs, length(vs) as c1,reverse(vs) as c2 from varchar_single_partition where s='positive';
+
+explain vectorization detail
+select * from varchar_ctas_1 order by vs, c1, c2;
+
+select * from varchar_ctas_1 order by vs, c1, c2;
\ No newline at end of file
diff --git ql/src/test/results/clientpositive/llap/orc_ppd_schema_evol_3a.q.out ql/src/test/results/clientpositive/llap/orc_ppd_schema_evol_3a.q.out
index 45586be..53d5c62 100644
--- ql/src/test/results/clientpositive/llap/orc_ppd_schema_evol_3a.q.out
+++ ql/src/test/results/clientpositive/llap/orc_ppd_schema_evol_3a.q.out
@@ -205,13 +205,8 @@ POSTHOOK: Lineage: orc_ppd_n3.v EXPRESSION [(orc_ppd_staging_n2)orc_ppd_staging_
 PREHOOK: query: select count(*) from orc_ppd_n3 where t > 127
 PREHOOK: type: QUERY
 PREHOOK: Input: default@orc_ppd_n3
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
 Stage-1 FILE SYSTEM COUNTERS:
-   HDFS_BYTES_READ: 17010
-   HDFS_BYTES_WRITTEN: 101
-   HDFS_READ_OPS: 7
-   HDFS_LARGE_READ_OPS: 0
-   HDFS_WRITE_OPS: 2
 Stage-1 HIVE COUNTERS:
    CREATED_FILES: 1
    DESERIALIZE_ERRORS: 0
@@
-245,13 +240,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where t > 127 PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 0 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 3 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 RECORDS_OUT_0: 1 @@ -267,13 +257,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where t = 55 PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 0 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 3 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -305,13 +290,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where t = 55 PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 720 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 4 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -345,13 +325,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where t = 54 PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 0 - HDFS_BYTES_WRITTEN: 102 - HDFS_READ_OPS: 3 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -383,13 +358,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where t = 54 PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 0 - HDFS_BYTES_WRITTEN: 102 - HDFS_READ_OPS: 3 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -425,13 +395,8 @@ PREHOOK: Output: default@orc_ppd_n3 PREHOOK: query: select count(*) from orc_ppd_n3 where t > 127 PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 0 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 3 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -447,13 +412,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 0 RECORDS_OUT_OPERATOR_TS_0: 2100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 359 - CACHE_MISS_BYTES: 0 - METADATA_CACHE_HIT: 2 - NUM_DECODED_BATCHES: 3 - NUM_VECTOR_BATCHES: 3 - ROWS_EMITTED: 2100 - SELECTED_ROWGROUPS: 3 + METADATA_CACHE_HIT: 1 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 1 @@ -463,13 +422,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where t > 127 PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 0 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 3 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 RECORDS_OUT_0: 1 @@ -485,13 +439,8 @@ Stage-1 INPUT 
COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where t = 55 PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 0 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 3 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -507,13 +456,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 8 RECORDS_OUT_OPERATOR_TS_0: 2100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 359 - CACHE_MISS_BYTES: 0 - METADATA_CACHE_HIT: 2 - NUM_DECODED_BATCHES: 3 - NUM_VECTOR_BATCHES: 3 - ROWS_EMITTED: 2100 - SELECTED_ROWGROUPS: 3 + METADATA_CACHE_HIT: 1 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 1 @@ -523,13 +466,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where t = 55 PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 0 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 3 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -545,13 +483,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 8 RECORDS_OUT_OPERATOR_TS_0: 1000 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 - CACHE_MISS_BYTES: 0 - METADATA_CACHE_HIT: 2 - NUM_DECODED_BATCHES: 1 - NUM_VECTOR_BATCHES: 1 - ROWS_EMITTED: 1000 - SELECTED_ROWGROUPS: 1 + METADATA_CACHE_HIT: 1 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 1 @@ -561,13 +493,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where t = 54 PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 0 - HDFS_BYTES_WRITTEN: 102 - HDFS_READ_OPS: 3 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -583,13 +510,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 18 RECORDS_OUT_OPERATOR_TS_0: 2100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 359 - CACHE_MISS_BYTES: 0 - METADATA_CACHE_HIT: 2 - NUM_DECODED_BATCHES: 3 - NUM_VECTOR_BATCHES: 3 - ROWS_EMITTED: 2100 - SELECTED_ROWGROUPS: 3 + METADATA_CACHE_HIT: 1 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 1 @@ -599,13 +520,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where t = 54 PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 0 - HDFS_BYTES_WRITTEN: 102 - HDFS_READ_OPS: 3 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -621,13 +537,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 18 RECORDS_OUT_OPERATOR_TS_0: 2000 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 - CACHE_MISS_BYTES: 0 - METADATA_CACHE_HIT: 2 - NUM_DECODED_BATCHES: 2 - NUM_VECTOR_BATCHES: 2 - ROWS_EMITTED: 2000 - SELECTED_ROWGROUPS: 2 + METADATA_CACHE_HIT: 1 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 1 @@ -641,13 +551,8 @@ PREHOOK: Output: default@orc_ppd_n3 PREHOOK: query: select count(*) from orc_ppd_n3 where t > 127 PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### 
HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 0 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 3 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -663,13 +568,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 0 RECORDS_OUT_OPERATOR_TS_0: 2100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 359 - CACHE_MISS_BYTES: 0 - METADATA_CACHE_HIT: 2 - NUM_DECODED_BATCHES: 3 - NUM_VECTOR_BATCHES: 3 - ROWS_EMITTED: 2100 - SELECTED_ROWGROUPS: 3 + METADATA_CACHE_HIT: 1 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 1 @@ -679,13 +578,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where t > 127 PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 0 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 3 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 RECORDS_OUT_0: 1 @@ -701,13 +595,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where t = 55 PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 0 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 3 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -723,13 +612,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 8 RECORDS_OUT_OPERATOR_TS_0: 2100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 359 - CACHE_MISS_BYTES: 0 - METADATA_CACHE_HIT: 2 - NUM_DECODED_BATCHES: 3 - NUM_VECTOR_BATCHES: 3 - ROWS_EMITTED: 2100 - SELECTED_ROWGROUPS: 3 + METADATA_CACHE_HIT: 1 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 1 @@ -739,13 +622,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where t = 55 PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 0 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 3 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -761,13 +639,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 8 RECORDS_OUT_OPERATOR_TS_0: 1000 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 - CACHE_MISS_BYTES: 0 - METADATA_CACHE_HIT: 2 - NUM_DECODED_BATCHES: 1 - NUM_VECTOR_BATCHES: 1 - ROWS_EMITTED: 1000 - SELECTED_ROWGROUPS: 1 + METADATA_CACHE_HIT: 1 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 1 @@ -777,13 +649,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where t = 54 PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 0 - HDFS_BYTES_WRITTEN: 102 - HDFS_READ_OPS: 3 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -799,13 +666,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 18 RECORDS_OUT_OPERATOR_TS_0: 2100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 359 - CACHE_MISS_BYTES: 0 - METADATA_CACHE_HIT: 2 - NUM_DECODED_BATCHES: 3 - NUM_VECTOR_BATCHES: 3 - ROWS_EMITTED: 2100 - SELECTED_ROWGROUPS: 3 + METADATA_CACHE_HIT: 1 
Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 1 @@ -815,13 +676,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where t = 54 PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 0 - HDFS_BYTES_WRITTEN: 102 - HDFS_READ_OPS: 3 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -837,13 +693,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 18 RECORDS_OUT_OPERATOR_TS_0: 2000 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 - CACHE_MISS_BYTES: 0 - METADATA_CACHE_HIT: 2 - NUM_DECODED_BATCHES: 2 - NUM_VECTOR_BATCHES: 2 - ROWS_EMITTED: 2000 - SELECTED_ROWGROUPS: 2 + METADATA_CACHE_HIT: 1 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 1 @@ -857,13 +707,8 @@ PREHOOK: Output: default@orc_ppd_n3 PREHOOK: query: select count(*) from orc_ppd_n3 where t > 127 PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 0 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 3 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -879,13 +724,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 0 RECORDS_OUT_OPERATOR_TS_0: 2100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 359 - CACHE_MISS_BYTES: 0 - METADATA_CACHE_HIT: 2 - NUM_DECODED_BATCHES: 3 - NUM_VECTOR_BATCHES: 3 - ROWS_EMITTED: 2100 - SELECTED_ROWGROUPS: 3 + METADATA_CACHE_HIT: 1 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 1 @@ -895,13 +734,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where t > 127 PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 0 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 3 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 RECORDS_OUT_0: 1 @@ -917,13 +751,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where t = 55 PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 0 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 3 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -939,13 +768,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 8 RECORDS_OUT_OPERATOR_TS_0: 2100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 359 - CACHE_MISS_BYTES: 0 - METADATA_CACHE_HIT: 2 - NUM_DECODED_BATCHES: 3 - NUM_VECTOR_BATCHES: 3 - ROWS_EMITTED: 2100 - SELECTED_ROWGROUPS: 3 + METADATA_CACHE_HIT: 1 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 1 @@ -955,13 +778,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where t = 55 PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 0 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 3 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -977,13 +795,7 @@ Stage-1 HIVE 
COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 8 RECORDS_OUT_OPERATOR_TS_0: 1000 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 - CACHE_MISS_BYTES: 0 - METADATA_CACHE_HIT: 2 - NUM_DECODED_BATCHES: 1 - NUM_VECTOR_BATCHES: 1 - ROWS_EMITTED: 1000 - SELECTED_ROWGROUPS: 1 + METADATA_CACHE_HIT: 1 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 1 @@ -993,13 +805,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where t = 54 PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 0 - HDFS_BYTES_WRITTEN: 102 - HDFS_READ_OPS: 3 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -1015,13 +822,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 18 RECORDS_OUT_OPERATOR_TS_0: 2100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 359 - CACHE_MISS_BYTES: 0 - METADATA_CACHE_HIT: 2 - NUM_DECODED_BATCHES: 3 - NUM_VECTOR_BATCHES: 3 - ROWS_EMITTED: 2100 - SELECTED_ROWGROUPS: 3 + METADATA_CACHE_HIT: 1 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 1 @@ -1031,13 +832,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where t = 54 PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 0 - HDFS_BYTES_WRITTEN: 102 - HDFS_READ_OPS: 3 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -1053,13 +849,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 18 RECORDS_OUT_OPERATOR_TS_0: 2000 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 - CACHE_MISS_BYTES: 0 - METADATA_CACHE_HIT: 2 - NUM_DECODED_BATCHES: 2 - NUM_VECTOR_BATCHES: 2 - ROWS_EMITTED: 2000 - SELECTED_ROWGROUPS: 2 + METADATA_CACHE_HIT: 1 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 1 @@ -1073,13 +863,8 @@ PREHOOK: Output: default@orc_ppd_n3 PREHOOK: query: select count(*) from orc_ppd_n3 where t > '127' PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 16900 - HDFS_BYTES_WRITTEN: 104 - HDFS_READ_OPS: 5 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -1105,13 +890,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where t > '127' PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 17730 - HDFS_BYTES_WRITTEN: 104 - HDFS_READ_OPS: 5 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -1137,13 +917,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where t = '55' PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 16900 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 5 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -1169,13 +944,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where t = '55' PREHOOK: 
type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 17730 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 5 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -1201,13 +971,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where t = '54' PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 16900 - HDFS_BYTES_WRITTEN: 102 - HDFS_READ_OPS: 5 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -1233,13 +998,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where t = '54' PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 17730 - HDFS_BYTES_WRITTEN: 102 - HDFS_READ_OPS: 5 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -1265,13 +1025,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where f = 74.72 PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 4896 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 4 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -1305,13 +1060,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where f = 74.72 PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 1750 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 4 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -1349,13 +1099,8 @@ PREHOOK: Output: default@orc_ppd_n3 PREHOOK: query: select count(*) from orc_ppd_n3 where f = 74.72 PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 21443 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 5 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -1381,13 +1126,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where f = 74.72 PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 23321 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 5 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -1417,13 +1157,8 @@ PREHOOK: Output: default@orc_ppd_n3 PREHOOK: query: select count(*) from orc_ppd_n3 where f = '74.72' PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 21443 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 5 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -1449,13 +1184,8 @@ 
Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where f = '74.72' PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 23321 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 5 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -1481,13 +1211,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where s = 'bob davidson' PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 4322 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 4 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -1521,13 +1246,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where s = 'bob davidson' PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 1589 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 4 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -1565,13 +1285,8 @@ PREHOOK: Output: default@orc_ppd_n3 PREHOOK: query: select count(*) from orc_ppd_n3 where s = 'bob davidson' PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 20860 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 5 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -1597,13 +1312,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where s = 'bob davidson' PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 22586 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 5 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -1633,13 +1343,8 @@ PREHOOK: Output: default@orc_ppd_n3 PREHOOK: query: select count(*) from orc_ppd_n3 where s = 'bob davidson' PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 0 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 3 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -1655,13 +1360,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 6 RECORDS_OUT_OPERATOR_TS_0: 2100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 4346 - CACHE_MISS_BYTES: 0 - METADATA_CACHE_HIT: 2 - NUM_DECODED_BATCHES: 3 - NUM_VECTOR_BATCHES: 3 - ROWS_EMITTED: 2100 - SELECTED_ROWGROUPS: 3 + METADATA_CACHE_HIT: 1 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 1 @@ -1671,13 +1370,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where s = 'bob davidson' PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 0 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 3 - HDFS_LARGE_READ_OPS: 0 - 
HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -1693,13 +1387,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 6 RECORDS_OUT_OPERATOR_TS_0: 2100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 5935 - CACHE_MISS_BYTES: 0 - METADATA_CACHE_HIT: 2 - NUM_DECODED_BATCHES: 3 - NUM_VECTOR_BATCHES: 3 - ROWS_EMITTED: 2100 - SELECTED_ROWGROUPS: 3 + METADATA_CACHE_HIT: 1 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 1 @@ -1713,13 +1401,8 @@ PREHOOK: Output: default@orc_ppd_n3 PREHOOK: query: select count(*) from orc_ppd_n3 where s = 'bob davidson' PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 20860 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 5 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -1745,13 +1428,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where s = 'bob davidson' PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 22586 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 5 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -1781,13 +1459,8 @@ PREHOOK: Output: default@orc_ppd_n3 PREHOOK: query: select count(*) from orc_ppd_n3 where s = 'bob davidson' PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 0 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 3 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -1819,13 +1492,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where s = 'bob davidson' PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 0 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 3 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -1861,13 +1529,8 @@ PREHOOK: Output: default@orc_ppd_n3 PREHOOK: query: select count(*) from orc_ppd_n3 where si = 442 PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 2062 - HDFS_BYTES_WRITTEN: 101 - HDFS_READ_OPS: 4 - HDFS_LARGE_READ_OPS: 0 - HDFS_WRITE_OPS: 2 Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 @@ -1883,15 +1546,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 4 RECORDS_OUT_OPERATOR_TS_0: 2100 Stage-1 LLAP IO COUNTERS: - ALLOCATED_BYTES: 786432 - ALLOCATED_USED_BYTES: 4264 - CACHE_HIT_BYTES: 24 - CACHE_MISS_BYTES: 2062 - METADATA_CACHE_HIT: 2 - NUM_DECODED_BATCHES: 3 - NUM_VECTOR_BATCHES: 3 - ROWS_EMITTED: 2100 - SELECTED_ROWGROUPS: 3 + METADATA_CACHE_HIT: 1 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 1 @@ -1901,13 +1556,8 @@ Stage-1 INPUT COUNTERS: PREHOOK: query: select count(*) from orc_ppd_n3 where si = 442 or boo is not null or boo = false PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### 
 Stage-1 FILE SYSTEM COUNTERS:
-   HDFS_BYTES_READ: 18628
-   HDFS_BYTES_WRITTEN: 101
-   HDFS_READ_OPS: 5
-   HDFS_LARGE_READ_OPS: 0
-   HDFS_WRITE_OPS: 2
 Stage-1 HIVE COUNTERS:
    CREATED_FILES: 1
    DESERIALIZE_ERRORS: 0
@@ -1933,13 +1583,8 @@ Stage-1 INPUT COUNTERS:
 PREHOOK: query: select count(*) from orc_ppd_n3 where si = 442
 PREHOOK: type: QUERY
 PREHOOK: Input: default@orc_ppd_n3
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
 Stage-1 FILE SYSTEM COUNTERS:
-   HDFS_BYTES_READ: 1215
-   HDFS_BYTES_WRITTEN: 101
-   HDFS_READ_OPS: 4
-   HDFS_LARGE_READ_OPS: 0
-   HDFS_WRITE_OPS: 2
 Stage-1 HIVE COUNTERS:
    CREATED_FILES: 1
    DESERIALIZE_ERRORS: 0
@@ -1955,15 +1600,7 @@ Stage-1 HIVE COUNTERS:
    RECORDS_OUT_OPERATOR_SEL_2: 4
    RECORDS_OUT_OPERATOR_TS_0: 1000
 Stage-1 LLAP IO COUNTERS:
-   ALLOCATED_BYTES: 262144
-   ALLOCATED_USED_BYTES: 2376
-   CACHE_HIT_BYTES: 2086
-   CACHE_MISS_BYTES: 1215
-   METADATA_CACHE_HIT: 2
-   NUM_DECODED_BATCHES: 1
-   NUM_VECTOR_BATCHES: 1
-   ROWS_EMITTED: 1000
-   SELECTED_ROWGROUPS: 1
+   METADATA_CACHE_HIT: 1
 Stage-1 INPUT COUNTERS:
    GROUPED_INPUT_SPLITS_Map_1: 1
    INPUT_DIRECTORIES_Map_1: 1
@@ -1973,13 +1610,8 @@ Stage-1 INPUT COUNTERS:
 PREHOOK: query: select count(*) from orc_ppd_n3 where si = 442 or boo is not null or boo = false
 PREHOOK: type: QUERY
 PREHOOK: Input: default@orc_ppd_n3
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
 Stage-1 FILE SYSTEM COUNTERS:
-   HDFS_BYTES_READ: 19952
-   HDFS_BYTES_WRITTEN: 101
-   HDFS_READ_OPS: 5
-   HDFS_LARGE_READ_OPS: 0
-   HDFS_WRITE_OPS: 2
 Stage-1 HIVE COUNTERS:
    CREATED_FILES: 1
    DESERIALIZE_ERRORS: 0
diff --git ql/src/test/results/clientpositive/llap/vector_llap_io_data_conversion.q.out ql/src/test/results/clientpositive/llap/vector_llap_io_data_conversion.q.out
new file mode 100644
index 0000000..f503761
--- /dev/null
+++ ql/src/test/results/clientpositive/llap/vector_llap_io_data_conversion.q.out
@@ -0,0 +1,187 @@
+PREHOOK: query: create table varchar_single_partition(vt varchar(10), vsi varchar(10), vi varchar(20), vb varchar(30), vf varchar(20),vd varchar(20),vs varchar(50))
+  partitioned by(s varchar(50)) stored as orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@varchar_single_partition
+POSTHOOK: query: create table varchar_single_partition(vt varchar(10), vsi varchar(10), vi varchar(20), vb varchar(30), vf varchar(20),vd varchar(20),vs varchar(50))
+  partitioned by(s varchar(50)) stored as orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@varchar_single_partition
+PREHOOK: query: insert into table varchar_single_partition partition(s='positive') select ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1 from alltypesorc where cint>0 limit 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+PREHOOK: Output: default@varchar_single_partition@s=positive
+POSTHOOK: query: insert into table varchar_single_partition partition(s='positive') select ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1 from alltypesorc where cint>0 limit 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+POSTHOOK: Output: default@varchar_single_partition@s=positive
+POSTHOOK: Lineage: varchar_single_partition PARTITION(s=positive).vb EXPRESSION [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: varchar_single_partition PARTITION(s=positive).vd EXPRESSION [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: varchar_single_partition PARTITION(s=positive).vf EXPRESSION [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: varchar_single_partition PARTITION(s=positive).vi EXPRESSION [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: varchar_single_partition PARTITION(s=positive).vs EXPRESSION [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: varchar_single_partition PARTITION(s=positive).vsi EXPRESSION [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: varchar_single_partition PARTITION(s=positive).vt EXPRESSION [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ]
+PREHOOK: query: insert into table varchar_single_partition partition(s='negative') select ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1 from alltypesorc where cint<0 limit 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+PREHOOK: Output: default@varchar_single_partition@s=negative
+POSTHOOK: query: insert into table varchar_single_partition partition(s='negative') select ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1 from alltypesorc where cint<0 limit 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+POSTHOOK: Output: default@varchar_single_partition@s=negative
+POSTHOOK: Lineage: varchar_single_partition PARTITION(s=negative).vb EXPRESSION [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: varchar_single_partition PARTITION(s=negative).vd EXPRESSION [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: varchar_single_partition PARTITION(s=negative).vf EXPRESSION [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: varchar_single_partition PARTITION(s=negative).vi EXPRESSION [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: varchar_single_partition PARTITION(s=negative).vs EXPRESSION [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: varchar_single_partition PARTITION(s=negative).vsi EXPRESSION [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: varchar_single_partition PARTITION(s=negative).vt EXPRESSION [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ]
+PREHOOK: query: alter table varchar_single_partition change column vs vs varchar(10)
+PREHOOK: type: ALTERTABLE_RENAMECOL
+PREHOOK: Input: default@varchar_single_partition
+PREHOOK: Output: default@varchar_single_partition
+POSTHOOK: query: alter table varchar_single_partition change column vs vs varchar(10)
+POSTHOOK: type: ALTERTABLE_RENAMECOL
+POSTHOOK: Input: default@varchar_single_partition
+POSTHOOK: Output: default@varchar_single_partition
+PREHOOK: query: create table varchar_ctas_1 stored as orc as select vs, length(vs) as c1,reverse(vs) as c2 from varchar_single_partition where s='positive'
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@varchar_single_partition
+PREHOOK: Input: default@varchar_single_partition@s=positive
+PREHOOK: Output: database:default
+PREHOOK: Output: default@varchar_ctas_1
+POSTHOOK: query: create table varchar_ctas_1 stored as orc as select vs, length(vs) as c1,reverse(vs) as c2 from varchar_single_partition where s='positive'
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@varchar_single_partition
+POSTHOOK: Input: default@varchar_single_partition@s=positive
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@varchar_ctas_1
+POSTHOOK: Lineage: varchar_ctas_1.c1 EXPRESSION [(varchar_single_partition)varchar_single_partition.FieldSchema(name:vs, type:varchar(10), comment:null), ]
+POSTHOOK: Lineage: varchar_ctas_1.c2 EXPRESSION [(varchar_single_partition)varchar_single_partition.FieldSchema(name:vs, type:varchar(10), comment:null), ]
+POSTHOOK: Lineage: varchar_ctas_1.vs SIMPLE [(varchar_single_partition)varchar_single_partition.FieldSchema(name:vs, type:varchar(10), comment:null), ]
+PREHOOK: query: explain vectorization detail
+select * from varchar_ctas_1 order by vs, c1, c2
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization detail
+select * from varchar_ctas_1 order by vs, c1, c2
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: varchar_ctas_1
+                  Statistics: Num rows: 10 Data size: 2820 Basic stats: COMPLETE Column stats: NONE
+                  TableScan Vectorization:
+                      native: true
+                      vectorizationSchemaColumns: [0:vs:varchar(10), 1:c1:int, 2:c2:string, 3:ROW__ID:struct]
+                  Select Operator
+                    expressions: vs (type: varchar(10)), c1 (type: int), c2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
+                        projectedOutputColumnNums: [0, 1, 2]
+                    Statistics: Num rows: 10 Data size: 2820 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: varchar(10)), _col1 (type: int), _col2 (type: string)
+                      sort order: +++
+                      Reduce Sink Vectorization:
+                          className: VectorReduceSinkObjectHashOperator
+                          keyColumnNums: [0, 1, 2]
+                          native: true
+                          nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                          valueColumnNums: []
+                      Statistics: Num rows: 10 Data size: 2820 Basic stats: COMPLETE Column stats: NONE
+            Execution mode: vectorized, llap
+            LLAP IO: all inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 3
+                    includeColumns: [0, 1, 2]
+                    dataColumns: vs:varchar(10), c1:int, c2:string
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
+        Reducer 2 
+            Execution mode: vectorized, llap
+            Reduce Vectorization:
+                enabled: true
+                enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+                reduceColumnNullOrder: aaa
+                reduceColumnSortOrder: +++
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 3
+                    dataColumns: KEY.reducesinkkey0:varchar(10), KEY.reducesinkkey1:int, KEY.reducesinkkey2:string
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: varchar(10)), KEY.reducesinkkey1 (type: int), KEY.reducesinkkey2 (type: string)
+                outputColumnNames: _col0, _col1, _col2
+                Select Vectorization:
+                    className: VectorSelectOperator
+                    native: true
+                    projectedOutputColumnNums: [0, 1, 2]
+                Statistics: Num rows: 10 Data size: 2820 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  File Sink Vectorization:
+                      className: VectorFileSinkOperator
+                      native: false
+                  Statistics: Num rows: 10 Data size: 2820 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select * from varchar_ctas_1 order by vs, c1, c2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_ctas_1
+#### A masked pattern was here ####
+POSTHOOK: query: select * from varchar_ctas_1 order by vs, c1, c2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_ctas_1
+#### A masked pattern was here ####
+cvLH6Eat2y 10 y2taE6HLvc
+cvLH6Eat2y 10 y2taE6HLvc
+cvLH6Eat2y 10 y2taE6HLvc
+cvLH6Eat2y 10 y2taE6HLvc
+cvLH6Eat2y 10 y2taE6HLvc
+cvLH6Eat2y 10 y2taE6HLvc
+cvLH6Eat2y 10 y2taE6HLvc
+cvLH6Eat2y 10 y2taE6HLvc
+cvLH6Eat2y 10 y2taE6HLvc
+cvLH6Eat2y 10 y2taE6HLvc
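
Note (not part of the patch): the core change above makes LlapRecordReader.checkOrcSchemaEvolution() return false whenever ORC's SchemaEvolution reports a type conversion, so LLAP skips its encoded/cached read path and the regular ORC reader performs the conversion instead — the situation the new q file creates by writing vs as varchar(50) and then reading it as varchar(10) after ALTER TABLE ... CHANGE COLUMN. Below is a minimal sketch of the condition being checked; the demo class and the direct SchemaEvolution construction are illustrative assumptions, not code from the patch.

```java
import org.apache.orc.Reader;
import org.apache.orc.TypeDescription;
import org.apache.orc.impl.SchemaEvolution;

public class HasConversionDemo {
  public static void main(String[] args) {
    // The ORC file still carries vs as varchar(50), while the reader (table)
    // schema declares varchar(10) after the ALTER TABLE in the test.
    TypeDescription fileSchema = TypeDescription.fromString("struct<vs:varchar(50)>");
    TypeDescription readerSchema = TypeDescription.fromString("struct<vs:varchar(10)>");
    SchemaEvolution evolution =
        new SchemaEvolution(fileSchema, readerSchema, new Reader.Options());
    // This is the condition the patch tests in checkOrcSchemaEvolution():
    // when a conversion is required, LLAP IO's encoded read path is not used.
    System.out.println("hasConversion = " + evolution.hasConversion());
  }
}
```

Falling back trades the cache benefit (visible as the LLAP IO counters shrinking to a single METADATA_CACHE_HIT in the updated golden files) for correct results on converted columns.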