diff --git ql/src/test/queries/clientnegative/strict_managed_tables4.q ql/src/test/queries/clientnegative/strict_managed_tables4.q index 34d477a6bc..0af00e5b57 100644 --- ql/src/test/queries/clientnegative/strict_managed_tables4.q +++ ql/src/test/queries/clientnegative/strict_managed_tables4.q @@ -14,3 +14,5 @@ describe strict_managed_tables6_tab1; CREATE TABLE strict_managed_tables6_tab2 STORED AS AVRO TBLPROPERTIES ('avro.schema.url'='${system:test.tmp.dir}/smt6_schema.avsc'); + +dfs -rm ${system:test.tmp.dir}/smt6_schema.avsc; diff --git ql/src/test/queries/clientpositive/avro_add_column_extschema.q ql/src/test/queries/clientpositive/avro_add_column_extschema.q index 1234678078..8fdf6a91a0 100644 --- ql/src/test/queries/clientpositive/avro_add_column_extschema.q +++ ql/src/test/queries/clientpositive/avro_add_column_extschema.q @@ -26,12 +26,12 @@ ADD COLUMNS (age int); DESCRIBE avro_extschema_literal; -dfs -cp ${system:hive.root}data/files/grad.avsc ${system:test.tmp.dir}/; +dfs -cp ${system:hive.root}data/files/grad.avsc ${system:test.tmp.dir}/avro_add_column_extschema.avsc; CREATE TABLE avro_extschema_url STORED AS AVRO -TBLPROPERTIES ('avro.schema.url'='${system:test.tmp.dir}/grad.avsc'); +TBLPROPERTIES ('avro.schema.url'='${system:test.tmp.dir}/avro_add_column_extschema.avsc'); DESCRIBE avro_extschema_url; @@ -45,4 +45,6 @@ DESCRIBE avro_extschema_url; ALTER TABLE avro_extschema_url ADD COLUMNS (col7 int); -DESCRIBE avro_extschema_url; \ No newline at end of file +DESCRIBE avro_extschema_url; + +dfs -rm ${system:test.tmp.dir}/avro_add_column_extschema.avsc; diff --git ql/src/test/queries/clientpositive/avro_alter_table_update_columns.q ql/src/test/queries/clientpositive/avro_alter_table_update_columns.q index 5b0bad5d75..d977713091 100644 --- ql/src/test/queries/clientpositive/avro_alter_table_update_columns.q +++ ql/src/test/queries/clientpositive/avro_alter_table_update_columns.q @@ -41,17 +41,17 @@ DESCRIBE avro_extschema_literal_n1; ALTER TABLE 
avro_extschema_literal_n1 UNSET TBLPROPERTIES ('avro.schema.literal'); DESCRIBE avro_extschema_literal_n1; -dfs -cp ${system:hive.root}data/files/grad.avsc ${system:test.tmp.dir}/; -dfs -cp ${system:hive.root}data/files/grad2.avsc ${system:test.tmp.dir}/; +dfs -cp ${system:hive.root}data/files/grad.avsc ${system:test.tmp.dir}/avro_alter_table_update_columns.avsc; +dfs -cp ${system:hive.root}data/files/grad2.avsc ${system:test.tmp.dir}/avro_alter_table_update_columns2.avsc; CREATE TABLE avro_extschema_url_n1 STORED AS AVRO - TBLPROPERTIES ('avro.schema.url'='${system:test.tmp.dir}/grad.avsc'); + TBLPROPERTIES ('avro.schema.url'='${system:test.tmp.dir}/avro_alter_table_update_columns.avsc'); DESCRIBE avro_extschema_url_n1; ALTER TABLE avro_extschema_url_n1 SET - TBLPROPERTIES ('avro.schema.url'='${system:test.tmp.dir}/grad2.avsc'); + TBLPROPERTIES ('avro.schema.url'='${system:test.tmp.dir}/avro_alter_table_update_columns2.avsc'); DESCRIBE avro_extschema_url_n1; ALTER TABLE avro_extschema_url_n1 UNSET TBLPROPERTIES ('avro.schema.url'); @@ -59,7 +59,7 @@ DESCRIBE avro_extschema_url_n1; ALTER TABLE avro_extschema_url_n1 SET - TBLPROPERTIES ('avro.schema.url'='${system:test.tmp.dir}/grad2.avsc'); + TBLPROPERTIES ('avro.schema.url'='${system:test.tmp.dir}/avro_alter_table_update_columns2.avsc'); ALTER TABLE avro_extschema_url_n1 UPDATE COLUMNS CASCADE; DESCRIBE avro_extschema_url_n1; @@ -73,7 +73,7 @@ DESCRIBE avro_extschema_url_n1; CREATE TABLE avro_extschema_url_parted PARTITIONED BY (p1 string, p2 string) STORED AS AVRO - TBLPROPERTIES ('avro.schema.url'='${system:test.tmp.dir}/grad.avsc'); + TBLPROPERTIES ('avro.schema.url'='${system:test.tmp.dir}/avro_alter_table_update_columns.avsc'); ALTER TABLE avro_extschema_url_parted ADD PARTITION (p1=2017, p2=11); ALTER TABLE avro_extschema_url_parted @@ -85,7 +85,7 @@ DESCRIBE avro_extschema_url_parted; --case: partial partition spec ALTER TABLE avro_extschema_url_parted SET - TBLPROPERTIES 
('avro.schema.url'='${system:test.tmp.dir}/grad2.avsc'); + TBLPROPERTIES ('avro.schema.url'='${system:test.tmp.dir}/avro_alter_table_update_columns2.avsc'); EXPLAIN ALTER TABLE avro_extschema_url_parted PARTITION (p1=2018) UPDATE COLUMNS; ALTER TABLE avro_extschema_url_parted PARTITION (p1=2018) UPDATE COLUMNS; ALTER TABLE avro_extschema_url_parted UNSET TBLPROPERTIES ('avro.schema.url'); @@ -97,7 +97,7 @@ DESCRIBE avro_extschema_url_parted PARTITION (p1=2018, p2=3); --case: table with restrict (no cascade) ALTER TABLE avro_extschema_url_parted SET - TBLPROPERTIES ('avro.schema.url'='${system:test.tmp.dir}/grad2.avsc'); + TBLPROPERTIES ('avro.schema.url'='${system:test.tmp.dir}/avro_alter_table_update_columns2.avsc'); ALTER TABLE avro_extschema_url_parted UPDATE COLUMNS; ALTER TABLE avro_extschema_url_parted UNSET TBLPROPERTIES ('avro.schema.url'); @@ -108,7 +108,7 @@ DESCRIBE avro_extschema_url_parted PARTITION (p1=2018, p2=3); --case: full partition spec ALTER TABLE avro_extschema_url_parted SET - TBLPROPERTIES ('avro.schema.url'='${system:test.tmp.dir}/grad2.avsc'); + TBLPROPERTIES ('avro.schema.url'='${system:test.tmp.dir}/avro_alter_table_update_columns2.avsc'); ALTER TABLE avro_extschema_url_parted PARTITION (p1=2017, p2=11) UPDATE COLUMNS; ALTER TABLE avro_extschema_url_parted UNSET TBLPROPERTIES ('avro.schema.url'); @@ -116,3 +116,6 @@ DESCRIBE avro_extschema_url_parted; DESCRIBE avro_extschema_url_parted PARTITION (p1=2017, p2=11); DESCRIBE avro_extschema_url_parted PARTITION (p1=2018, p2=2); DESCRIBE avro_extschema_url_parted PARTITION (p1=2018, p2=3); + +dfs -rm ${system:test.tmp.dir}/avro_alter_table_update_columns.avsc; +dfs -rm ${system:test.tmp.dir}/avro_alter_table_update_columns2.avsc; diff --git ql/src/test/queries/clientpositive/avro_extschema_insert.q ql/src/test/queries/clientpositive/avro_extschema_insert.q index c1980713b8..ef79d229df 100644 --- ql/src/test/queries/clientpositive/avro_extschema_insert.q +++ 
ql/src/test/queries/clientpositive/avro_extschema_insert.q @@ -1,9 +1,9 @@ set hive.exec.dynamic.partition.mode=nonstrict; -dfs -cp ${system:hive.root}data/files/table1.avsc ${system:test.tmp.dir}/; +dfs -cp ${system:hive.root}data/files/table1.avsc ${system:test.tmp.dir}/avro_extschema_insert.avsc; create external table avro_extschema_insert1 (name string) partitioned by (p1 string) - stored as avro tblproperties ('avro.schema.url'='${system:test.tmp.dir}/table1.avsc'); + stored as avro tblproperties ('avro.schema.url'='${system:test.tmp.dir}/avro_extschema_insert.avsc'); describe avro_extschema_insert1; @@ -14,7 +14,7 @@ insert overwrite table avro_extschema_insert1 partition (p1='part1') values ('co insert overwrite table avro_extschema_insert2 partition (p1) select * from avro_extschema_insert1; select * from avro_extschema_insert2; -dfs -rm ${system:test.tmp.dir}/table1.avsc; - drop table avro_extschema_insert1; drop table avro_extschema_insert2; + +dfs -rm ${system:test.tmp.dir}/avro_extschema_insert.avsc; diff --git ql/src/test/queries/clientpositive/avro_tableproperty_optimize.q ql/src/test/queries/clientpositive/avro_tableproperty_optimize.q index 0f576075a8..f32859f4d9 100644 --- ql/src/test/queries/clientpositive/avro_tableproperty_optimize.q +++ ql/src/test/queries/clientpositive/avro_tableproperty_optimize.q @@ -1,7 +1,7 @@ -- Check the queries work fine with the following property set to true SET hive.optimize.update.table.properties.from.serde=true; -dfs -cp ${system:hive.root}data/files/table1.avsc ${system:test.tmp.dir}/; +dfs -cp ${system:hive.root}data/files/table1.avsc ${system:test.tmp.dir}/avro_tableproperty_optimize.avsc; CREATE TABLE avro_extschema_literal_n0 STORED AS AVRO @@ -21,7 +21,7 @@ SELECT * FROM avro_extschema_literal_n0; CREATE TABLE avro_extschema_url_n0 STORED AS AVRO -TBLPROPERTIES ('avro.schema.url'='${system:test.tmp.dir}/table1.avsc'); +TBLPROPERTIES 
('avro.schema.url'='${system:test.tmp.dir}/avro_tableproperty_optimize.avsc'); INSERT INTO TABLE avro_extschema_url_n0 VALUES('s1', 1, 's2'); DESCRIBE EXTENDED avro_extschema_url_n0; @@ -52,7 +52,7 @@ SELECT * FROM avro_extschema_literal1; CREATE TABLE avro_extschema_url1 ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe' -WITH SERDEPROPERTIES ('avro.schema.url'='${system:test.tmp.dir}/table1.avsc') +WITH SERDEPROPERTIES ('avro.schema.url'='${system:test.tmp.dir}/avro_tableproperty_optimize.avsc') STORED AS INPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat' OUTPUTFORMAT @@ -61,3 +61,5 @@ INSERT INTO TABLE avro_extschema_url1 VALUES('s1', 1, 's2'); DESCRIBE EXTENDED avro_extschema_url1; SELECT * FROM avro_extschema_url1; + +dfs -rm ${system:test.tmp.dir}/avro_tableproperty_optimize.avsc; diff --git ql/src/test/queries/clientpositive/avrotblsjoin.q ql/src/test/queries/clientpositive/avrotblsjoin.q index dedd13799f..84c72fb91b 100644 --- ql/src/test/queries/clientpositive/avrotblsjoin.q +++ ql/src/test/queries/clientpositive/avrotblsjoin.q @@ -1,8 +1,8 @@ drop table if exists table1_n1; drop table if exists table1_1; -dfs -cp ${system:hive.root}data/files/table1.avsc ${system:test.tmp.dir}/; -dfs -cp ${system:hive.root}data/files/table1_1.avsc ${system:test.tmp.dir}/; +dfs -cp ${system:hive.root}data/files/table1.avsc ${system:test.tmp.dir}/avrotblsjoin.avsc; +dfs -cp ${system:hive.root}data/files/table1_1.avsc ${system:test.tmp.dir}/avrotblsjoin_1.avsc; create table table1_n1 ROW FORMAT SERDE @@ -11,7 +11,7 @@ create table table1_n1 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat' OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat' - TBLPROPERTIES ('avro.schema.url'='${system:test.tmp.dir}/table1.avsc'); + TBLPROPERTIES ('avro.schema.url'='${system:test.tmp.dir}/avrotblsjoin.avsc'); create table table1_1 ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe' @@ -19,10 +19,13 @@ create 
table table1_1 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat' OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat' - TBLPROPERTIES ('avro.schema.url'='${system:test.tmp.dir}/table1_1.avsc'); + TBLPROPERTIES ('avro.schema.url'='${system:test.tmp.dir}/avrotblsjoin_1.avsc'); insert into table1_n1 values ("1", "2", "3"); insert into table1_1 values (1, "2"); set hive.auto.convert.join=false; set hive.strict.checks.type.safety=false; set hive.mapred.mode=nonstrict; select table1_n1.col1, table1_1.* from table1_n1 join table1_1 on table1_n1.col1=table1_1.col1 where table1_1.col1="1"; + +dfs -rm ${system:test.tmp.dir}/avrotblsjoin.avsc; +dfs -rm ${system:test.tmp.dir}/avrotblsjoin_1.avsc; diff --git ql/src/test/queries/clientpositive/compustat_avro.q ql/src/test/queries/clientpositive/compustat_avro.q index 8bf3344e1a..916b10c9c5 100644 --- ql/src/test/queries/clientpositive/compustat_avro.q +++ ql/src/test/queries/clientpositive/compustat_avro.q @@ -1,21 +1,23 @@ drop table if exists testAvro; -dfs -cp ${system:hive.root}data/files/grad.avsc ${system:test.tmp.dir}/; +dfs -cp ${system:hive.root}data/files/grad.avsc ${system:test.tmp.dir}/compustat_avro.avsc; -- File URIs using system:hive.root (using file:/) don't seem to work properly in DDL statements on Windows, -- so use dfs to copy them over to system:test.tmp.dir (which uses pfile:/), which does appear to work create table testAvro - ROW FORMAT SERDE - 'org.apache.hadoop.hive.serde2.avro.AvroSerDe' - STORED AS INPUTFORMAT - 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat' - OUTPUTFORMAT + ROW FORMAT SERDE + 'org.apache.hadoop.hive.serde2.avro.AvroSerDe' + STORED AS INPUTFORMAT + 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat' + OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat' - TBLPROPERTIES ('avro.schema.url'='${system:test.tmp.dir}/grad.avsc'); + TBLPROPERTIES ('avro.schema.url'='${system:test.tmp.dir}/compustat_avro.avsc'); 
describe formatted testAvro col1; analyze table testAvro compute statistics for columns col1,col3; describe formatted testAvro col1; + +dfs -rm ${system:test.tmp.dir}/compustat_avro.avsc; diff --git ql/src/test/queries/clientpositive/rfc5424_parser.q ql/src/test/queries/clientpositive/rfc5424_parser.q index 929c8915a0..afaff3eea9 100644 --- ql/src/test/queries/clientpositive/rfc5424_parser.q +++ ql/src/test/queries/clientpositive/rfc5424_parser.q @@ -79,3 +79,5 @@ select decode(unmatched, 'UTF-8') from logs where unmatched is not null limit 10 drop table logs; drop table logs2; + +dfs -rm -r ${hiveconf:hive.metastore.warehouse.dir}/logs2/; diff --git ql/src/test/queries/clientpositive/rfc5424_parser_exception.q ql/src/test/queries/clientpositive/rfc5424_parser_exception.q index e7a0667d03..53f2039ec4 100644 --- ql/src/test/queries/clientpositive/rfc5424_parser_exception.q +++ ql/src/test/queries/clientpositive/rfc5424_parser_exception.q @@ -35,3 +35,5 @@ select length(decode(msg,'UTF-8')) as msg from logs2; select regexp_replace(regexp_replace(decode(msg,'UTF-8'), "at ", "at-"), "Caused by", "Caused-by") as msg from logs2; drop table logs2; + +dfs -rm -r ${hiveconf:hive.metastore.warehouse.dir}/logs2/; diff --git ql/src/test/queries/clientpositive/rfc5424_parser_file_pruning.q ql/src/test/queries/clientpositive/rfc5424_parser_file_pruning.q index a9dd32399c..7a55711032 100644 --- ql/src/test/queries/clientpositive/rfc5424_parser_file_pruning.q +++ ql/src/test/queries/clientpositive/rfc5424_parser_file_pruning.q @@ -80,3 +80,5 @@ select severity,count(*) from logs2 where ts between '2019-03-22 01:00:00.0' and select severity,count(*) from logs2 where dt='2019-03-22' group by severity; drop table logs2; + +dfs -rm -r ${hiveconf:hive.metastore.warehouse.dir}/logs2/; diff --git ql/src/test/queries/clientpositive/stats_noscan_2.q ql/src/test/queries/clientpositive/stats_noscan_2.q index 76d974f658..2bbbf40d72 100644 --- 
ql/src/test/queries/clientpositive/stats_noscan_2.q +++ ql/src/test/queries/clientpositive/stats_noscan_2.q @@ -1,7 +1,7 @@ --! qt:dataset:src dfs -cp ${system:hive.root}/data/files/ext_test ${system:test.tmp.dir}/analyze_external; --- test analyze table compute statistiscs [noscan] on external table +-- test analyze table compute statistiscs [noscan] on external table -- 1 test table CREATE EXTERNAL TABLE anaylyze_external (a INT) LOCATION '${system:test.tmp.dir}/analyze_external'; SELECT * FROM anaylyze_external; @@ -20,7 +20,7 @@ from src insert overwrite table texternal partition (insertdate='2008-01-01') se explain select count(*) from texternal where insertdate='2008-01-01'; select count(*) from texternal where insertdate='2008-01-01'; -- create external table -CREATE EXTERNAL TABLE anaylyze_external (key string, val string) partitioned by (insertdate string) LOCATION "pfile://${system:test.tmp.dir}/texternal"; +CREATE EXTERNAL TABLE anaylyze_external (key string, val string) partitioned by (insertdate string) LOCATION "pfile://${system:test.tmp.dir}/texternal"; ALTER TABLE anaylyze_external ADD PARTITION (insertdate='2008-01-01') location 'pfile://${system:test.tmp.dir}/texternal/2008-01-01'; select count(*) from anaylyze_external where insertdate='2008-01-01'; -- analyze @@ -33,5 +33,4 @@ explain select count(*) from anaylyze_external where insertdate='2008-01-01'; select count(*) from anaylyze_external where insertdate='2008-01-01'; drop table anaylyze_external; - - +dfs -rm -r ${system:test.tmp.dir}/analyze_external; diff --git ql/src/test/results/clientpositive/avro_add_column_extschema.q.out ql/src/test/results/clientpositive/avro_add_column_extschema.q.out index 670dc1e21d..59498ae2f2 100644 --- ql/src/test/results/clientpositive/avro_add_column_extschema.q.out +++ ql/src/test/results/clientpositive/avro_add_column_extschema.q.out @@ -159,3 +159,4 @@ col4 string col5 string col6 bigint col7 int +#### A masked pattern was here #### diff --git 
ql/src/test/results/clientpositive/avro_tableproperty_optimize.q.out ql/src/test/results/clientpositive/avro_tableproperty_optimize.q.out index 6fc005abe8..8f6c02f01c 100644 --- ql/src/test/results/clientpositive/avro_tableproperty_optimize.q.out +++ ql/src/test/results/clientpositive/avro_tableproperty_optimize.q.out @@ -224,3 +224,4 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@avro_extschema_url1 #### A masked pattern was here #### s1 1 s2 +#### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/avrotblsjoin.q.out ql/src/test/results/clientpositive/avrotblsjoin.q.out index 8468bcb87a..f6579557aa 100644 --- ql/src/test/results/clientpositive/avrotblsjoin.q.out +++ ql/src/test/results/clientpositive/avrotblsjoin.q.out @@ -84,3 +84,4 @@ POSTHOOK: Input: default@table1_1 POSTHOOK: Input: default@table1_n1 #### A masked pattern was here #### 1 1 2 +#### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/compustat_avro.q.out ql/src/test/results/clientpositive/compustat_avro.q.out index a20e87caf7..3655dfab0e 100644 --- ql/src/test/results/clientpositive/compustat_avro.q.out +++ ql/src/test/results/clientpositive/compustat_avro.q.out @@ -3,22 +3,22 @@ PREHOOK: type: DROPTABLE POSTHOOK: query: drop table if exists testAvro POSTHOOK: type: DROPTABLE PREHOOK: query: create table testAvro - ROW FORMAT SERDE - 'org.apache.hadoop.hive.serde2.avro.AvroSerDe' - STORED AS INPUTFORMAT - 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat' - OUTPUTFORMAT + ROW FORMAT SERDE + 'org.apache.hadoop.hive.serde2.avro.AvroSerDe' + STORED AS INPUTFORMAT + 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat' + OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat' #### A masked pattern was here #### PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@testAvro POSTHOOK: query: create table testAvro - ROW FORMAT SERDE - 'org.apache.hadoop.hive.serde2.avro.AvroSerDe' - 
STORED AS INPUTFORMAT - 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat' - OUTPUTFORMAT + ROW FORMAT SERDE + 'org.apache.hadoop.hive.serde2.avro.AvroSerDe' + STORED AS INPUTFORMAT + 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat' + OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat' #### A masked pattern was here #### POSTHOOK: type: CREATETABLE @@ -72,3 +72,4 @@ num_falses bit_vector comment from deserializer COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"col1\":\"true\",\"col2\":\"true\",\"col3\":\"true\",\"col4\":\"true\",\"col5\":\"true\",\"col6\":\"true\"}} +#### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/llap/avro_extschema_insert.q.out ql/src/test/results/clientpositive/llap/avro_extschema_insert.q.out index 2976ee3290..bb92c221b2 100644 --- ql/src/test/results/clientpositive/llap/avro_extschema_insert.q.out +++ ql/src/test/results/clientpositive/llap/avro_extschema_insert.q.out @@ -65,7 +65,6 @@ POSTHOOK: Input: default@avro_extschema_insert2 POSTHOOK: Input: default@avro_extschema_insert2@p1=part1 #### A masked pattern was here #### col1_value 1 col3_value part1 -#### A masked pattern was here #### PREHOOK: query: drop table avro_extschema_insert1 PREHOOK: type: DROPTABLE PREHOOK: Input: default@avro_extschema_insert1 @@ -82,3 +81,4 @@ POSTHOOK: query: drop table avro_extschema_insert2 POSTHOOK: type: DROPTABLE POSTHOOK: Input: default@avro_extschema_insert2 POSTHOOK: Output: default@avro_extschema_insert2 +#### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/llap/rfc5424_parser_file_pruning.q.out ql/src/test/results/clientpositive/llap/rfc5424_parser_file_pruning.q.out index 3e3ac5cf7c..73c6f8e7ee 100644 --- ql/src/test/results/clientpositive/llap/rfc5424_parser_file_pruning.q.out +++ ql/src/test/results/clientpositive/llap/rfc5424_parser_file_pruning.q.out @@ -67,6 +67,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_RS_12: 
0 RECORDS_OUT_OPERATOR_SEL_10: 0 RECORDS_OUT_OPERATOR_TS_0: 1087 + TOTAL_TABLE_ROWS_WRITTEN: 0 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 2 @@ -94,6 +95,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_RS_12: 3 RECORDS_OUT_OPERATOR_SEL_10: 445 RECORDS_OUT_OPERATOR_TS_0: 1087 + TOTAL_TABLE_ROWS_WRITTEN: 0 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 2 @@ -124,6 +126,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_RS_12: 4 RECORDS_OUT_OPERATOR_SEL_10: 274 RECORDS_OUT_OPERATOR_TS_0: 1087 + TOTAL_TABLE_ROWS_WRITTEN: 0 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 2 @@ -154,6 +157,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_RS_12: 0 RECORDS_OUT_OPERATOR_SEL_10: 0 RECORDS_OUT_OPERATOR_TS_0: 1087 + TOTAL_TABLE_ROWS_WRITTEN: 0 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 2 @@ -181,6 +185,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_RS_12: 4 RECORDS_OUT_OPERATOR_SEL_10: 719 RECORDS_OUT_OPERATOR_TS_0: 1087 + TOTAL_TABLE_ROWS_WRITTEN: 0 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 2 @@ -210,6 +215,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_RS_11: 5 RECORDS_OUT_OPERATOR_SEL_9: 756 RECORDS_OUT_OPERATOR_TS_0: 756 + TOTAL_TABLE_ROWS_WRITTEN: 0 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 1 @@ -241,6 +247,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_RS_12: 0 RECORDS_OUT_OPERATOR_SEL_10: 0 RECORDS_OUT_OPERATOR_TS_0: 1087 + TOTAL_TABLE_ROWS_WRITTEN: 0 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 2 @@ -254,7 +261,7 @@ PREHOOK: Input: default@logs2@dt=2019-03-22/ns=bar/app=hs2 #### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: Stage-1 HIVE COUNTERS: - CREATED_FILES: 2 + CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 RECORDS_IN_Map_1: 331 RECORDS_OUT_0: 2 @@ -268,6 +275,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_RS_12: 
2 RECORDS_OUT_OPERATOR_SEL_10: 220 RECORDS_OUT_OPERATOR_TS_0: 1087 + TOTAL_TABLE_ROWS_WRITTEN: 0 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 2 @@ -296,6 +304,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_RS_12: 0 RECORDS_OUT_OPERATOR_SEL_10: 0 RECORDS_OUT_OPERATOR_TS_0: 1087 + TOTAL_TABLE_ROWS_WRITTEN: 0 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 2 @@ -309,7 +318,7 @@ PREHOOK: Input: default@logs2@dt=2019-03-22/ns=bar/app=hs2 #### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: Stage-1 HIVE COUNTERS: - CREATED_FILES: 2 + CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 RECORDS_IN_Map_1: 331 RECORDS_OUT_0: 2 @@ -323,6 +332,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_RS_12: 2 RECORDS_OUT_OPERATOR_SEL_10: 220 RECORDS_OUT_OPERATOR_TS_0: 1087 + TOTAL_TABLE_ROWS_WRITTEN: 0 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 2 @@ -350,6 +360,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_RS_11: 3 RECORDS_OUT_OPERATOR_SEL_9: 331 RECORDS_OUT_OPERATOR_TS_0: 331 + TOTAL_TABLE_ROWS_WRITTEN: 0 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 1 @@ -379,6 +390,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_RS_12: 0 RECORDS_OUT_OPERATOR_SEL_10: 0 RECORDS_OUT_OPERATOR_TS_0: 756 + TOTAL_TABLE_ROWS_WRITTEN: 0 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 2 @@ -406,6 +418,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_RS_12: 3 RECORDS_OUT_OPERATOR_SEL_10: 445 RECORDS_OUT_OPERATOR_TS_0: 756 + TOTAL_TABLE_ROWS_WRITTEN: 0 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 2 @@ -436,6 +449,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_RS_12: 4 RECORDS_OUT_OPERATOR_SEL_10: 274 RECORDS_OUT_OPERATOR_TS_0: 756 + TOTAL_TABLE_ROWS_WRITTEN: 0 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 2 @@ -466,6 +480,7 @@ Stage-1 HIVE COUNTERS: 
RECORDS_OUT_OPERATOR_RS_12: 0 RECORDS_OUT_OPERATOR_SEL_10: 0 RECORDS_OUT_OPERATOR_TS_0: 756 + TOTAL_TABLE_ROWS_WRITTEN: 0 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 2 @@ -493,6 +508,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_RS_12: 4 RECORDS_OUT_OPERATOR_SEL_10: 719 RECORDS_OUT_OPERATOR_TS_0: 756 + TOTAL_TABLE_ROWS_WRITTEN: 0 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 2 @@ -522,6 +538,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_RS_11: 5 RECORDS_OUT_OPERATOR_SEL_9: 756 RECORDS_OUT_OPERATOR_TS_0: 756 + TOTAL_TABLE_ROWS_WRITTEN: 0 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 1 @@ -553,6 +570,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_RS_12: 0 RECORDS_OUT_OPERATOR_SEL_10: 0 RECORDS_OUT_OPERATOR_TS_0: 331 + TOTAL_TABLE_ROWS_WRITTEN: 0 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 2 @@ -566,7 +584,7 @@ PREHOOK: Input: default@logs2@dt=2019-03-22/ns=bar/app=hs2 #### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: Stage-1 HIVE COUNTERS: - CREATED_FILES: 2 + CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 RECORDS_IN_Map_1: 331 RECORDS_OUT_0: 2 @@ -580,6 +598,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_RS_12: 2 RECORDS_OUT_OPERATOR_SEL_10: 220 RECORDS_OUT_OPERATOR_TS_0: 331 + TOTAL_TABLE_ROWS_WRITTEN: 0 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 2 @@ -608,6 +627,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_RS_12: 0 RECORDS_OUT_OPERATOR_SEL_10: 0 RECORDS_OUT_OPERATOR_TS_0: 331 + TOTAL_TABLE_ROWS_WRITTEN: 0 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 2 @@ -621,7 +641,7 @@ PREHOOK: Input: default@logs2@dt=2019-03-22/ns=bar/app=hs2 #### A masked pattern was here #### Stage-1 FILE SYSTEM COUNTERS: Stage-1 HIVE COUNTERS: - CREATED_FILES: 2 + CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 RECORDS_IN_Map_1: 331 RECORDS_OUT_0: 2 @@ -635,6 +655,7 @@ Stage-1 
HIVE COUNTERS: RECORDS_OUT_OPERATOR_RS_12: 2 RECORDS_OUT_OPERATOR_SEL_10: 220 RECORDS_OUT_OPERATOR_TS_0: 331 + TOTAL_TABLE_ROWS_WRITTEN: 0 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 2 @@ -662,6 +683,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_RS_11: 3 RECORDS_OUT_OPERATOR_SEL_9: 331 RECORDS_OUT_OPERATOR_TS_0: 331 + TOTAL_TABLE_ROWS_WRITTEN: 0 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 1