From ea185c34f4306fdcd95de59a03928b51997b4e69 Mon Sep 17 00:00:00 2001
From: Ashutosh Chauhan
Date: Fri, 15 May 2020 23:02:35 -0700
Subject: [PATCH] HIVE-23292 : Reduce PartitionDesc payload in MapWork

---
 .../insert_into_dynamic_partitions.q.out | 8 -
 .../clientpositive/insert_into_table.q.out | 76 ---
 .../insert_overwrite_directory.q.out | 32 -
 .../insert_overwrite_dynamic_partitions.q.out | 8 -
 .../insert_overwrite_table.q.out | 76 ---
 .../write_final_output_blobstore.q.out | 64 --
 .../hadoop/hive/ql/plan/PartitionDesc.java | 7 +-
 .../apache/hadoop/hive/ql/plan/TableDesc.java | 2 -
 .../clientpositive/binary_output_format.q.out | 40 --
 .../clientpositive/bucket_map_join_1.q.out | 5 -
 .../clientpositive/bucket_map_join_2.q.out | 5 -
 .../bucket_map_join_spark1.q.out | 46 --
 .../bucket_map_join_spark2.q.out | 46 --
 .../bucket_map_join_spark3.q.out | 46 --
 .../bucket_map_join_spark4.q.out | 42 --
 .../clientpositive/bucketcontext_1.q.out | 25 -
 .../clientpositive/bucketcontext_2.q.out | 25 -
 .../clientpositive/bucketcontext_3.q.out | 20 -
 .../clientpositive/bucketcontext_4.q.out | 20 -
 .../clientpositive/bucketcontext_5.q.out | 10 -
 .../clientpositive/bucketcontext_6.q.out | 20 -
 .../clientpositive/bucketcontext_7.q.out | 30 -
 .../clientpositive/bucketcontext_8.q.out | 30 -
 .../clientpositive/bucketmapjoin10.q.out | 20 -
 .../clientpositive/bucketmapjoin11.q.out | 40 --
 .../clientpositive/bucketmapjoin12.q.out | 22 -
 .../clientpositive/bucketmapjoin13.q.out | 54 --
 .../clientpositive/bucketmapjoin5.q.out | 84 ---
 .../clientpositive/bucketmapjoin8.q.out | 20 -
 .../clientpositive/bucketmapjoin9.q.out | 20 -
 .../bucketmapjoin_negative.q.out | 42 --
 .../bucketmapjoin_negative2.q.out | 47 --
 .../bucketmapjoin_negative3.q.out | 45 --
 .../encryption_join_unencrypted_tbl.q.out | 32 -
 .../results/clientpositive/join_map_ppr.q.out | 80 ---
 .../clientpositive/list_bucket_dml_8.q.out | 37 --
 .../llap/autoColumnStats_5a.q.out | 20 -
 .../llap/autoColumnStats_8.q.out | 60 --
 .../llap/auto_sortmerge_join_1.q.out | 81 ---
 .../llap/auto_sortmerge_join_11.q.out | 126 ----
 .../llap/auto_sortmerge_join_12.q.out | 45 --
 .../llap/auto_sortmerge_join_2.q.out | 54 --
 .../llap/auto_sortmerge_join_3.q.out | 81 ---
 .../llap/auto_sortmerge_join_4.q.out | 81 ---
 .../llap/auto_sortmerge_join_5.q.out | 78 ---
 .../llap/auto_sortmerge_join_7.q.out | 108 ----
 .../llap/auto_sortmerge_join_8.q.out | 108 ----
 .../results/clientpositive/llap/bucket1.q.out | 30 -
 .../results/clientpositive/llap/bucket2.q.out | 30 -
 .../results/clientpositive/llap/bucket3.q.out | 20 -
 .../results/clientpositive/llap/bucket4.q.out | 30 -
 .../clientpositive/llap/bucket_many.q.out | 30 -
 .../llap/bucket_map_join_tez2.q.out | 16 -
 .../llap/bucket_num_reducers.q.out | 30 -
 .../llap/bucket_num_reducers2.q.out | 30 -
 .../clientpositive/llap/bucketmapjoin1.q.out | 76 ---
 .../clientpositive/llap/bucketmapjoin2.q.out | 111 ----
 .../clientpositive/llap/bucketmapjoin3.q.out | 68 --
 .../clientpositive/llap/bucketmapjoin4.q.out | 84 ---
 .../clientpositive/llap/bucketmapjoin7.q.out | 18 -
 .../llap/cbo_rp_outer_join_ppr.q.out | 80 ---
 .../llap/cbo_stats_estimation.q.out | 28 -
 .../llap/column_table_stats.q.out | 69 --
 .../llap/column_table_stats_orc.q.out | 49 --
 .../llap/columnstats_partlvl.q.out | 22 -
 .../llap/columnstats_tbllvl.q.out | 28 -
 .../clientpositive/llap/comments.q.out | 32 -
 .../llap/constantPropagateForSubQuery.q.out | 32 -
 .../llap/disable_merge_for_bucketing.q.out | 30 -
 .../llap/display_colstats_tbllvl.q.out | 14 -
 .../llap/dynamic_semijoin_reduction.q.out | 44 --
 .../clientpositive/llap/filter_aggr.q.out | 16 -
 .../llap/filter_join_breaktask.q.out | 36 --
 .../clientpositive/llap/filter_union.q.out | 32 -
 .../clientpositive/llap/groupby_map_ppr.q.out | 40 --
 .../llap/groupby_map_ppr_multi_distinct.q.out | 40 --
 .../clientpositive/llap/groupby_ppr.q.out | 40 --
 .../llap/groupby_ppr_multi_distinct.q.out | 80 ---
 .../llap/groupby_sort_1_23.q.out | 587 ------
 .../clientpositive/llap/groupby_sort_6.q.out | 59 --
 .../llap/groupby_sort_skew_1_23.q.out | 587 ------
 .../llap/infer_bucket_sort_num_buckets.q.out | 30 -
 .../results/clientpositive/llap/input23.q.out | 12 -
 .../clientpositive/llap/input_part1.q.out | 28 -
 .../clientpositive/llap/input_part2.q.out | 56 --
 .../clientpositive/llap/input_part7.q.out | 48 --
 .../results/clientpositive/llap/join17.q.out | 48 --
 .../results/clientpositive/llap/join26.q.out | 60 --
 .../results/clientpositive/llap/join32.q.out | 60 --
 .../clientpositive/llap/join32_lessSize.q.out | 260 --------
 .../results/clientpositive/llap/join33.q.out | 60 --
 .../results/clientpositive/llap/join34.q.out | 72 ---
 .../results/clientpositive/llap/join35.q.out | 72 ---
 .../results/clientpositive/llap/join9.q.out | 44 --
 .../llap/join_filters_overlap.q.out | 160 -----
 .../llap/list_bucket_dml_1.q.out | 42 --
 .../llap/list_bucket_dml_10.q.out | 22 -
 .../llap/list_bucket_dml_11.q.out | 34 -
 .../llap/list_bucket_dml_12.q.out | 46 --
 .../llap/list_bucket_dml_13.q.out | 34 -
 .../llap/list_bucket_dml_14.q.out | 32 -
 .../llap/list_bucket_dml_2.q.out | 42 --
 .../llap/list_bucket_dml_3.q.out | 30 -
 .../llap/list_bucket_dml_4.q.out | 72 ---
 .../llap/list_bucket_dml_5.q.out | 54 --
 .../llap/list_bucket_dml_6.q.out | 84 ---
 .../llap/list_bucket_dml_7.q.out | 84 ---
 .../llap/list_bucket_dml_9.q.out | 72 ---
 .../llap/list_bucket_query_oneskew_2.q.out | 36 --
 .../clientpositive/llap/load_dyn_part8.q.out | 60 --
 .../clientpositive/llap/louter_join_ppr.q.out | 160 -----
 .../clientpositive/llap/mapjoin_mapjoin.q.out | 80 ---
 .../results/clientpositive/llap/merge3.q.out | 76 ---
 .../clientpositive/llap/metadataonly1.q.out | 240 -------
 .../llap/murmur_hash_migration.q.out | 68 --
 .../llap/murmur_hash_migration2.q.out | 8 -
 .../llap/offset_limit_global_optimizer.q.out | 384 ------------
 .../llap/optimize_nullscan.q.out | 272 --------
 .../clientpositive/llap/outer_join_ppr.q.out | 80 ---
 .../llap/parquet_vectorization_0.q.out | 80 ---
 .../results/clientpositive/llap/pcr.q.out | 568 -----
 .../results/clientpositive/llap/pcs.q.out | 252 --------
 .../clientpositive/llap/pointlookup2.q.out | 328 ----------
 .../clientpositive/llap/pointlookup3.q.out | 288 ---------
 .../clientpositive/llap/pointlookup4.q.out | 48 --
 .../clientpositive/llap/ppd_join_filter.q.out | 64 --
 .../clientpositive/llap/ppd_union_view.q.out | 36 --
 .../results/clientpositive/llap/ppd_vc.q.out | 112 ----
 .../results/clientpositive/llap/push_or.q.out | 24 -
 .../llap/rand_partitionpruner2.q.out | 40 --
 .../clientpositive/llap/router_join_ppr.q.out | 160 -----
 .../results/clientpositive/llap/sample1.q.out | 28 -
 .../clientpositive/llap/sample10.q.out | 40 --
 .../results/clientpositive/llap/sample5.q.out | 31 -
 .../results/clientpositive/llap/sample6.q.out | 136 ----
 .../results/clientpositive/llap/sample7.q.out | 31 -
 .../results/clientpositive/llap/sample8.q.out | 60 --
 .../clientpositive/llap/sharedwork.q.out | 112 ----
 .../clientpositive/llap/smb_mapjoin_15.q.out | 120 ----
.../results/clientpositive/llap/stats0.q.out | 64 -- .../results/clientpositive/llap/stats11.q.out | 76 --- .../results/clientpositive/llap/stats12.q.out | 18 - .../results/clientpositive/llap/stats13.q.out | 9 - .../temp_table_alter_partition_coltype.q.out | 120 ---- .../temp_table_display_colstats_tbllvl.q.out | 12 - .../llap/tez_fixed_bucket_pruning.q.out | 156 ----- .../llap/topnkey_windowing.q.out | 32 - .../clientpositive/llap/vectorization_0.q.out | 80 --- .../clientpositive/regexp_extract.q.out | 16 - .../serde_user_properties.q.out | 24 - .../sort_merge_join_desc_5.q.out | 6 - .../sort_merge_join_desc_6.q.out | 12 - .../sort_merge_join_desc_7.q.out | 24 - .../temp_table_partition_pruning.q.out | 42 -- .../results/clientpositive/timestamp.q.out | 8 - .../clientpositive/transform_ppr1.q.out | 32 - .../clientpositive/transform_ppr2.q.out | 16 - .../truncate_column_list_bucket.q.out | 14 - .../results/clientpositive/udf_explode.q.out | 16 - .../results/clientpositive/udtf_explode.q.out | 16 - .../test/results/clientpositive/union22.q.out | 57 -- .../test/results/clientpositive/union24.q.out | 96 --- .../results/clientpositive/union_ppr.q.out | 16 - .../hive/metastore/utils/MetaStoreUtils.java | 22 +- 164 files changed, 15 insertions(+), 11235 deletions(-) diff --git a/itests/hive-blobstore/src/test/results/clientpositive/insert_into_dynamic_partitions.q.out b/itests/hive-blobstore/src/test/results/clientpositive/insert_into_dynamic_partitions.q.out index 3783c15203..fd82a0c0f8 100644 --- a/itests/hive-blobstore/src/test/results/clientpositive/insert_into_dynamic_partitions.q.out +++ b/itests/hive-blobstore/src/test/results/clientpositive/insert_into_dynamic_partitions.q.out @@ -129,11 +129,9 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns - columns.comments columns.types #### A masked pattern was here #### name _dummy_database._dummy_table - serialization.ddl struct _dummy_table { } serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe serde: org.apache.hadoop.hive.serde2.NullStructSerDe @@ -141,7 +139,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.NullRowsInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns @@ -149,7 +146,6 @@ STAGE PLANS: columns.types #### A masked pattern was here #### name _dummy_database._dummy_table - serialization.ddl struct _dummy_table { } serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe serde: org.apache.hadoop.hive.serde2.NullStructSerDe @@ -187,10 +183,8 @@ STAGE PLANS: name default.table1 partition_columns key partition_columns.types string - serialization.ddl struct table1 { i32 id} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.table1 TotalFiles: 1 @@ -220,10 +214,8 @@ STAGE PLANS: name default.table1 partition_columns key partition_columns.types string - serialization.ddl struct table1 { i32 id} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.table1 diff --git a/itests/hive-blobstore/src/test/results/clientpositive/insert_into_table.q.out b/itests/hive-blobstore/src/test/results/clientpositive/insert_into_table.q.out index 
92c785c15a..ce071cba37 100644 --- a/itests/hive-blobstore/src/test/results/clientpositive/insert_into_table.q.out +++ b/itests/hive-blobstore/src/test/results/clientpositive/insert_into_table.q.out @@ -90,8 +90,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"id":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns id @@ -100,14 +98,8 @@ STAGE PLANS: #### A masked pattern was here #### location ### test.blobstore.path ###/table1 name default.table1 - numFiles 2 - numRows 2 - rawDataSize 2 - serialization.ddl struct table1 { i32 id} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 4 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.table1 TotalFiles: 1 @@ -145,11 +137,9 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns - columns.comments columns.types #### A masked pattern was here #### name _dummy_database._dummy_table - serialization.ddl struct _dummy_table { } serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe serde: org.apache.hadoop.hive.serde2.NullStructSerDe @@ -157,7 +147,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.NullRowsInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns @@ -165,7 +154,6 @@ STAGE PLANS: columns.types #### A masked pattern was here #### name _dummy_database._dummy_table - serialization.ddl struct _dummy_table { } serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe serde: org.apache.hadoop.hive.serde2.NullStructSerDe @@ -217,8 +205,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"id":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns id @@ -227,14 +213,8 @@ STAGE PLANS: #### A masked pattern was here #### location ### test.blobstore.path ###/table1 name default.table1 - numFiles 2 - numRows 2 - rawDataSize 2 - serialization.ddl struct table1 { i32 id} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 4 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.table1 @@ -263,8 +243,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"id":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns id @@ -273,14 +251,8 @@ STAGE PLANS: #### A masked pattern was here #### location ### test.blobstore.path ###/table1 name default.table1 - numFiles 2 - numRows 2 - rawDataSize 2 - serialization.ddl struct table1 { i32 id} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 4 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.table1 TotalFiles: 1 @@ -295,8 +267,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"id":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns id @@ -305,21 +275,13 @@ STAGE PLANS: #### A masked pattern was here #### location ### test.blobstore.path ###/table1 name default.table1 - numFiles 2 - numRows 2 - rawDataSize 2 - serialization.ddl struct table1 { i32 id} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 4 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"id":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns id @@ -328,14 +290,8 @@ STAGE PLANS: #### A masked pattern was here #### location ### test.blobstore.path ###/table1 name default.table1 - numFiles 2 - numRows 2 - rawDataSize 2 - serialization.ddl struct table1 { i32 id} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 4 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.table1 name: default.table1 @@ -351,8 +307,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"id":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns id @@ -361,14 +315,8 @@ STAGE PLANS: #### A masked pattern was here #### location ### test.blobstore.path ###/table1 name default.table1 - numFiles 2 - numRows 2 - rawDataSize 2 - serialization.ddl struct table1 { i32 id} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 4 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.table1 @@ -387,8 +335,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"id":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns id @@ -397,14 +343,8 @@ STAGE PLANS: #### A masked pattern was here #### location ### test.blobstore.path ###/table1 name default.table1 - numFiles 2 - numRows 2 - rawDataSize 2 - serialization.ddl struct table1 { i32 id} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 4 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.table1 TotalFiles: 1 @@ -419,8 +359,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"id":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns id @@ -429,21 +367,13 @@ STAGE PLANS: #### A masked pattern was here #### location ### test.blobstore.path ###/table1 name default.table1 - numFiles 2 - numRows 2 - rawDataSize 2 - serialization.ddl struct table1 { i32 id} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 4 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"id":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns id @@ -452,14 +382,8 @@ STAGE PLANS: #### A masked pattern was here #### location ### test.blobstore.path ###/table1 name default.table1 - numFiles 2 - numRows 2 - rawDataSize 2 - serialization.ddl struct table1 { i32 id} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 4 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.table1 name: default.table1 diff --git a/itests/hive-blobstore/src/test/results/clientpositive/insert_overwrite_directory.q.out b/itests/hive-blobstore/src/test/results/clientpositive/insert_overwrite_directory.q.out index a113a225ba..2eec9f2d78 100644 --- a/itests/hive-blobstore/src/test/results/clientpositive/insert_overwrite_directory.q.out +++ b/itests/hive-blobstore/src/test/results/clientpositive/insert_overwrite_directory.q.out @@ -120,31 +120,21 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"id":"true","key":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns id,key - columns.comments columns.types int:string field.delim , #### A masked pattern was here #### name default.table1 - numFiles 2 - numRows 2 - rawDataSize 8 - serialization.ddl struct table1 { i32 id, string key} serialization.format , serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 10 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"id":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns id,key @@ -153,14 +143,8 @@ STAGE PLANS: field.delim , #### A masked pattern was here #### name default.table1 - numFiles 2 - numRows 2 - rawDataSize 8 - serialization.ddl struct table1 { i32 id, string key} serialization.format , serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 10 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.table1 name: default.table1 @@ -392,31 +376,21 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"id":"true","key":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns id,key - columns.comments columns.types int:string field.delim , #### A masked pattern was here #### name default.table1 - numFiles 2 - numRows 2 - rawDataSize 8 - serialization.ddl struct table1 { i32 id, string key} serialization.format , serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 10 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"id":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns id,key @@ -425,14 +399,8 @@ STAGE PLANS: field.delim , #### A masked pattern was here #### name default.table1 - numFiles 2 - numRows 2 - rawDataSize 8 - serialization.ddl struct table1 { i32 id, string key} serialization.format , serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 10 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.table1 name: default.table1 diff --git a/itests/hive-blobstore/src/test/results/clientpositive/insert_overwrite_dynamic_partitions.q.out b/itests/hive-blobstore/src/test/results/clientpositive/insert_overwrite_dynamic_partitions.q.out index 91e95c4213..bdd82d4836 100644 --- a/itests/hive-blobstore/src/test/results/clientpositive/insert_overwrite_dynamic_partitions.q.out +++ b/itests/hive-blobstore/src/test/results/clientpositive/insert_overwrite_dynamic_partitions.q.out @@ -147,11 +147,9 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns - columns.comments columns.types #### A masked pattern was here #### name _dummy_database._dummy_table - serialization.ddl struct _dummy_table { } serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe serde: org.apache.hadoop.hive.serde2.NullStructSerDe @@ -159,7 +157,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.NullRowsInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns @@ -167,7 +164,6 @@ STAGE PLANS: columns.types #### A masked pattern was here #### name _dummy_database._dummy_table - serialization.ddl struct _dummy_table { } serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe serde: org.apache.hadoop.hive.serde2.NullStructSerDe @@ -205,10 +201,8 @@ STAGE PLANS: name default.table1 partition_columns key partition_columns.types string - serialization.ddl struct table1 { i32 id} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.table1 TotalFiles: 1 @@ -238,10 +232,8 @@ STAGE PLANS: name default.table1 partition_columns key partition_columns.types string - serialization.ddl struct table1 { i32 id} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.table1 diff --git a/itests/hive-blobstore/src/test/results/clientpositive/insert_overwrite_table.q.out b/itests/hive-blobstore/src/test/results/clientpositive/insert_overwrite_table.q.out index 96e77ed625..f65bf22eda 100644 --- a/itests/hive-blobstore/src/test/results/clientpositive/insert_overwrite_table.q.out +++ b/itests/hive-blobstore/src/test/results/clientpositive/insert_overwrite_table.q.out @@ -98,8 +98,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"id":"true"}} - 
bucket_count -1 bucketing_version 2 column.name.delimiter , columns id @@ -108,14 +106,8 @@ STAGE PLANS: #### A masked pattern was here #### location ### test.blobstore.path ###/table1 name default.table1 - numFiles 1 - numRows 1 - rawDataSize 1 - serialization.ddl struct table1 { i32 id} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.table1 TotalFiles: 1 @@ -153,11 +145,9 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns - columns.comments columns.types #### A masked pattern was here #### name _dummy_database._dummy_table - serialization.ddl struct _dummy_table { } serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe serde: org.apache.hadoop.hive.serde2.NullStructSerDe @@ -165,7 +155,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.NullRowsInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns @@ -173,7 +162,6 @@ STAGE PLANS: columns.types #### A masked pattern was here #### name _dummy_database._dummy_table - serialization.ddl struct _dummy_table { } serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe serde: org.apache.hadoop.hive.serde2.NullStructSerDe @@ -225,8 +213,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"id":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns id @@ -235,14 +221,8 @@ STAGE PLANS: #### A masked pattern was here #### location ### test.blobstore.path ###/table1 name default.table1 - numFiles 1 - numRows 1 - rawDataSize 1 - serialization.ddl struct table1 { i32 id} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.table1 @@ -271,8 +251,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"id":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns id @@ -281,14 +259,8 @@ STAGE PLANS: #### A masked pattern was here #### location ### test.blobstore.path ###/table1 name default.table1 - numFiles 1 - numRows 1 - rawDataSize 1 - serialization.ddl struct table1 { i32 id} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.table1 TotalFiles: 1 @@ -303,8 +275,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"id":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns id @@ -313,21 +283,13 @@ STAGE PLANS: #### A masked pattern was here #### location ### test.blobstore.path ###/table1 name default.table1 - numFiles 1 - numRows 1 - rawDataSize 1 - serialization.ddl struct table1 { i32 id} 
serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"id":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns id @@ -336,14 +298,8 @@ STAGE PLANS: #### A masked pattern was here #### location ### test.blobstore.path ###/table1 name default.table1 - numFiles 1 - numRows 1 - rawDataSize 1 - serialization.ddl struct table1 { i32 id} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.table1 name: default.table1 @@ -359,8 +315,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"id":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns id @@ -369,14 +323,8 @@ STAGE PLANS: #### A masked pattern was here #### location ### test.blobstore.path ###/table1 name default.table1 - numFiles 1 - numRows 1 - rawDataSize 1 - serialization.ddl struct table1 { i32 id} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.table1 @@ -395,8 +343,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"id":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns id @@ -405,14 +351,8 @@ STAGE PLANS: #### A masked pattern was here #### location ### test.blobstore.path ###/table1 name default.table1 - numFiles 1 - numRows 1 - rawDataSize 1 - serialization.ddl struct table1 { i32 id} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.table1 TotalFiles: 1 @@ -427,8 +367,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"id":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns id @@ -437,21 +375,13 @@ STAGE PLANS: #### A masked pattern was here #### location ### test.blobstore.path ###/table1 name default.table1 - numFiles 1 - numRows 1 - rawDataSize 1 - serialization.ddl struct table1 { i32 id} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"id":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns id @@ -460,14 +390,8 @@ 
STAGE PLANS: #### A masked pattern was here #### location ### test.blobstore.path ###/table1 name default.table1 - numFiles 1 - numRows 1 - rawDataSize 1 - serialization.ddl struct table1 { i32 id} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.table1 name: default.table1 diff --git a/itests/hive-blobstore/src/test/results/clientpositive/write_final_output_blobstore.q.out b/itests/hive-blobstore/src/test/results/clientpositive/write_final_output_blobstore.q.out index 2dd98ef438..8bfc66795e 100644 --- a/itests/hive-blobstore/src/test/results/clientpositive/write_final_output_blobstore.q.out +++ b/itests/hive-blobstore/src/test/results/clientpositive/write_final_output_blobstore.q.out @@ -80,30 +80,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key - columns.comments columns.types int #### A masked pattern was here #### name default.hdfs_table - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct hdfs_table { i32 key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key @@ -111,14 +101,8 @@ STAGE PLANS: columns.types int #### A masked pattern was here #### name default.hdfs_table - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct hdfs_table { i32 key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.hdfs_table name: default.hdfs_table @@ -211,8 +195,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key @@ -221,14 +203,8 @@ STAGE PLANS: #### A masked pattern was here #### location ### test.blobstore.path ###/write_final_output_blobstore name default.blobstore_table - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct blobstore_table { i32 key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.blobstore_table TotalFiles: 1 @@ -281,8 +257,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key @@ -291,14 +265,8 @@ STAGE PLANS: #### A masked pattern was here #### location ### 
test.blobstore.path ###/write_final_output_blobstore name default.blobstore_table - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct blobstore_table { i32 key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.blobstore_table @@ -368,30 +336,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key - columns.comments columns.types int #### A masked pattern was here #### name default.hdfs_table - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct hdfs_table { i32 key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key @@ -399,14 +357,8 @@ STAGE PLANS: columns.types int #### A masked pattern was here #### name default.hdfs_table - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct hdfs_table { i32 key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.hdfs_table name: default.hdfs_table @@ -499,8 +451,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key @@ -509,14 +459,8 @@ STAGE PLANS: #### A masked pattern was here #### location ### test.blobstore.path ###/write_final_output_blobstore name default.blobstore_table - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct blobstore_table { i32 key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.blobstore_table TotalFiles: 1 @@ -569,8 +513,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key @@ -579,14 +521,8 @@ STAGE PLANS: #### A masked pattern was here #### location ### test.blobstore.path ###/write_final_output_blobstore name default.blobstore_table - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct blobstore_table { i32 key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.blobstore_table diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java index b226ab7b24..fda8f461e1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java @@ -73,8 +73,6 @@ public void setBaseFileName(String baseFileName) { public PartitionDesc() { } - private final static org.slf4j.Logger LOG = org.slf4j.LoggerFactory.getLogger(PartitionDesc.class); - public PartitionDesc(final TableDesc table, final LinkedHashMap partSpec) { this.tableDesc = table; setPartSpec(partSpec); @@ -92,10 +90,6 @@ public PartitionDesc(final Partition part, final TableDesc tableDesc) throws Hiv } } - public PartitionDesc(final Partition part) throws HiveException { - this(part, getTableDesc(part.getTable())); - } - /** * @param part Partition * @param tblDesc Table Descriptor @@ -223,6 +217,7 @@ public Map getPropertiesExplain() { } public void setProperties(final Properties properties) { + properties.remove("columns.comments"); if (properties instanceof CopyOnFirstWriteProperties) { this.properties = properties; } else { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java index 7993779562..0435477ce4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java @@ -45,8 +45,6 @@ */ public class TableDesc implements Serializable, Cloneable { - private static final Logger LOG = LoggerFactory.getLogger(TableDesc.class); - private static final long serialVersionUID = 1L; private Class inputFileFormatClass; private Class outputFileFormatClass; diff --git a/ql/src/test/results/clientpositive/binary_output_format.q.out b/ql/src/test/results/clientpositive/binary_output_format.q.out index b414360855..aa8a6c1aa6 100644 --- a/ql/src/test/results/clientpositive/binary_output_format.q.out +++ b/ql/src/test/results/clientpositive/binary_output_format.q.out @@ -101,8 +101,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"mydata":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns mydata @@ -113,12 +111,10 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct dest1_n109 { string mydata} serialization.format 1 serialization.last.column.takes.rest true serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1_n109 TotalFiles: 1 @@ -152,30 +148,24 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src numFiles 1 numRows 500 rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input 
format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -186,11 +176,9 @@ STAGE PLANS: numFiles 1 numRows 500 rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -246,8 +234,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"mydata":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns mydata @@ -258,12 +244,10 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct dest1_n109 { string mydata} serialization.format 1 serialization.last.column.takes.rest true serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1_n109 @@ -292,8 +276,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"mydata":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns mydata @@ -304,12 +286,10 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct dest1_n109 { string mydata} serialization.format 1 serialization.last.column.takes.rest true serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1_n109 TotalFiles: 1 @@ -324,8 +304,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"mydata":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns mydata @@ -336,19 +314,15 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct dest1_n109 { string mydata} serialization.format 1 serialization.last.column.takes.rest true serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"mydata":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns mydata @@ -359,12 +333,10 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct dest1_n109 { string mydata} serialization.format 1 serialization.last.column.takes.rest true serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1_n109 name: default.dest1_n109 @@ -386,8 +358,6 @@ STAGE PLANS: 
input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"mydata":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns mydata @@ -398,12 +368,10 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct dest1_n109 { string mydata} serialization.format 1 serialization.last.column.takes.rest true serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1_n109 TotalFiles: 1 @@ -418,8 +386,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"mydata":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns mydata @@ -430,19 +396,15 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct dest1_n109 { string mydata} serialization.format 1 serialization.last.column.takes.rest true serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"mydata":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns mydata @@ -453,12 +415,10 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct dest1_n109 { string mydata} serialization.format 1 serialization.last.column.takes.rest true serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1_n109 name: default.dest1_n109 diff --git a/ql/src/test/results/clientpositive/bucket_map_join_1.q.out b/ql/src/test/results/clientpositive/bucket_map_join_1.q.out index 440345fa53..392721c0e8 100644 --- a/ql/src/test/results/clientpositive/bucket_map_join_1.q.out +++ b/ql/src/test/results/clientpositive/bucket_map_join_1.q.out @@ -136,18 +136,15 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.table1_n9 numFiles 1 numRows 0 rawDataSize 0 - serialization.ddl struct table1_n9 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 20 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -166,11 +163,9 @@ STAGE PLANS: numFiles 1 numRows 0 rawDataSize 0 - serialization.ddl struct table1_n9 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 20 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.table1_n9 name: default.table1_n9 diff --git a/ql/src/test/results/clientpositive/bucket_map_join_2.q.out b/ql/src/test/results/clientpositive/bucket_map_join_2.q.out index 20b27033b8..fcd88074c9 100644 --- 
a/ql/src/test/results/clientpositive/bucket_map_join_2.q.out +++ b/ql/src/test/results/clientpositive/bucket_map_join_2.q.out @@ -136,18 +136,15 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.table1 numFiles 1 numRows 0 rawDataSize 0 - serialization.ddl struct table1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 20 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -166,11 +163,9 @@ STAGE PLANS: numFiles 1 numRows 0 rawDataSize 0 - serialization.ddl struct table1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 20 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.table1 name: default.table1 diff --git a/ql/src/test/results/clientpositive/bucket_map_join_spark1.q.out b/ql/src/test/results/clientpositive/bucket_map_join_spark1.q.out index 98a45ee2fd..9990f36519 100644 --- a/ql/src/test/results/clientpositive/bucket_map_join_spark1.q.out +++ b/ql/src/test/results/clientpositive/bucket_map_join_spark1.q.out @@ -159,7 +159,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_n19 @@ -168,11 +167,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_n19 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -189,10 +186,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_n19 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_n19 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_n19 name: default.srcbucket_mapjoin_part_n19 @@ -258,8 +253,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -270,11 +263,9 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n9 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n9 TotalFiles: 1 @@ -326,7 +317,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n16 @@ -335,11 +325,9 @@ STAGE PLANS: 
partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2_n16 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -356,10 +344,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n16 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n16 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n16 name: default.srcbucket_mapjoin_part_2_n16 @@ -375,7 +361,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_n19 @@ -384,11 +369,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_n19 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -405,10 +388,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_n19 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_n19 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_n19 name: default.srcbucket_mapjoin_part_n19 @@ -424,8 +405,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -436,11 +415,9 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n9 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n9 @@ -627,7 +604,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_n19 @@ -636,11 +612,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_n19 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -657,10 +631,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_n19 partition_columns ds 
partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_n19 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_n19 name: default.srcbucket_mapjoin_part_n19 @@ -726,8 +698,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -738,11 +708,9 @@ STAGE PLANS: numFiles 1 numRows 1028 rawDataSize 19022 - serialization.ddl struct bucketmapjoin_tmp_result_n9 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 20050 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n9 TotalFiles: 1 @@ -794,7 +762,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n16 @@ -803,11 +770,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2_n16 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -824,10 +789,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n16 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n16 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n16 name: default.srcbucket_mapjoin_part_2_n16 @@ -843,7 +806,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_n19 @@ -852,11 +814,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_n19 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -873,10 +833,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_n19 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_n19 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_n19 name: default.srcbucket_mapjoin_part_n19 @@ -892,8 +850,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -904,11 +860,9 @@ STAGE PLANS: numFiles 1 numRows 1028 rawDataSize 19022 - serialization.ddl struct bucketmapjoin_tmp_result_n9 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 20050 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n9 diff --git a/ql/src/test/results/clientpositive/bucket_map_join_spark2.q.out b/ql/src/test/results/clientpositive/bucket_map_join_spark2.q.out index 902c1291f3..c91302879d 100644 --- a/ql/src/test/results/clientpositive/bucket_map_join_spark2.q.out +++ b/ql/src/test/results/clientpositive/bucket_map_join_spark2.q.out @@ -143,7 +143,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n10 @@ -152,11 +151,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2_n10 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 3062 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -173,10 +170,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n10 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n10 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n10 name: default.srcbucket_mapjoin_part_2_n10 @@ -242,8 +237,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -254,11 +247,9 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n5 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n5 TotalFiles: 1 @@ -310,7 +301,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n10 @@ -319,11 +309,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2_n10 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 3062 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input 
format: org.apache.hadoop.mapred.TextInputFormat @@ -340,10 +328,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n10 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n10 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n10 name: default.srcbucket_mapjoin_part_2_n10 @@ -359,7 +345,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_n12 @@ -368,11 +353,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_n12 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -389,10 +372,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_n12 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_n12 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_n12 name: default.srcbucket_mapjoin_part_n12 @@ -408,8 +389,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -420,11 +399,9 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n5 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n5 @@ -611,7 +588,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n10 @@ -620,11 +596,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2_n10 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 3062 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -641,10 +615,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n10 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n10 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n10 name: 
default.srcbucket_mapjoin_part_2_n10 @@ -710,8 +682,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -722,11 +692,9 @@ STAGE PLANS: numFiles 1 numRows 564 rawDataSize 10503 - serialization.ddl struct bucketmapjoin_tmp_result_n5 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 11067 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n5 TotalFiles: 1 @@ -778,7 +746,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n10 @@ -787,11 +754,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2_n10 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 3062 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -808,10 +773,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n10 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n10 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n10 name: default.srcbucket_mapjoin_part_2_n10 @@ -827,7 +790,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_n12 @@ -836,11 +798,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_n12 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -857,10 +817,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_n12 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_n12 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_n12 name: default.srcbucket_mapjoin_part_n12 @@ -876,8 +834,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -888,11 +844,9 @@ STAGE PLANS: numFiles 1 numRows 564 rawDataSize 10503 - 
serialization.ddl struct bucketmapjoin_tmp_result_n5 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 11067 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n5 diff --git a/ql/src/test/results/clientpositive/bucket_map_join_spark3.q.out b/ql/src/test/results/clientpositive/bucket_map_join_spark3.q.out index 42a6998e95..221ca4091d 100644 --- a/ql/src/test/results/clientpositive/bucket_map_join_spark3.q.out +++ b/ql/src/test/results/clientpositive/bucket_map_join_spark3.q.out @@ -143,7 +143,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_n4 @@ -152,11 +151,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_n4 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 3062 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -173,10 +170,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_n4 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_n4 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_n4 name: default.srcbucket_mapjoin_part_n4 @@ -242,8 +237,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -254,11 +247,9 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n1 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n1 TotalFiles: 1 @@ -310,7 +301,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n3 @@ -319,11 +309,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2_n3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -340,10 +328,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n3 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern 
was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n3 name: default.srcbucket_mapjoin_part_2_n3 @@ -359,7 +345,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_n4 @@ -368,11 +353,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_n4 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 3062 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -389,10 +372,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_n4 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_n4 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_n4 name: default.srcbucket_mapjoin_part_n4 @@ -408,8 +389,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -420,11 +399,9 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n1 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n1 @@ -611,7 +588,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_n4 @@ -620,11 +596,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_n4 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 3062 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -641,10 +615,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_n4 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_n4 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_n4 name: default.srcbucket_mapjoin_part_n4 @@ -710,8 +682,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ 
-722,11 +692,9 @@ STAGE PLANS: numFiles 1 numRows 564 rawDataSize 10503 - serialization.ddl struct bucketmapjoin_tmp_result_n1 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 11067 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n1 TotalFiles: 1 @@ -778,7 +746,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n3 @@ -787,11 +754,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2_n3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -808,10 +773,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n3 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n3 name: default.srcbucket_mapjoin_part_2_n3 @@ -827,7 +790,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_n4 @@ -836,11 +798,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_n4 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 3062 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -857,10 +817,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_n4 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_n4 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_n4 name: default.srcbucket_mapjoin_part_n4 @@ -876,8 +834,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -888,11 +844,9 @@ STAGE PLANS: numFiles 1 numRows 564 rawDataSize 10503 - serialization.ddl struct bucketmapjoin_tmp_result_n1 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 11067 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n1 diff --git a/ql/src/test/results/clientpositive/bucket_map_join_spark4.q.out 
b/ql/src/test/results/clientpositive/bucket_map_join_spark4.q.out index 5e6a28b4e8..7ff184d974 100644 --- a/ql/src/test/results/clientpositive/bucket_map_join_spark4.q.out +++ b/ql/src/test/results/clientpositive/bucket_map_join_spark4.q.out @@ -212,31 +212,26 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.tbl1_n0 numFiles 2 numRows 10 rawDataSize 70 - serialization.ddl struct tbl1_n0 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -250,11 +245,9 @@ STAGE PLANS: numFiles 2 numRows 10 rawDataSize 70 - serialization.ddl struct tbl1_n0 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tbl1_n0 name: default.tbl1_n0 @@ -264,31 +257,26 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.tbl2_n0 numFiles 2 numRows 10 rawDataSize 70 - serialization.ddl struct tbl2_n0 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -302,11 +290,9 @@ STAGE PLANS: numFiles 2 numRows 10 rawDataSize 70 - serialization.ddl struct tbl2_n0 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tbl2_n0 name: default.tbl2_n0 @@ -316,31 +302,26 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns 
key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.tbl3 numFiles 2 numRows 10 rawDataSize 70 - serialization.ddl struct tbl3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -354,11 +335,9 @@ STAGE PLANS: numFiles 2 numRows 10 rawDataSize 70 - serialization.ddl struct tbl3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tbl3 name: default.tbl3 @@ -597,31 +576,26 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.tbl1_n0 numFiles 2 numRows 10 rawDataSize 70 - serialization.ddl struct tbl1_n0 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -635,11 +609,9 @@ STAGE PLANS: numFiles 2 numRows 10 rawDataSize 70 - serialization.ddl struct tbl1_n0 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tbl1_n0 name: default.tbl1_n0 @@ -649,31 +621,26 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.tbl2_n0 numFiles 2 numRows 10 rawDataSize 70 - serialization.ddl struct tbl2_n0 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} 
SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -687,11 +654,9 @@ STAGE PLANS: numFiles 2 numRows 10 rawDataSize 70 - serialization.ddl struct tbl2_n0 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tbl2_n0 name: default.tbl2_n0 @@ -701,31 +666,26 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.tbl3 numFiles 2 numRows 10 rawDataSize 70 - serialization.ddl struct tbl3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -739,11 +699,9 @@ STAGE PLANS: numFiles 2 numRows 10 rawDataSize 70 - serialization.ddl struct tbl3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tbl3 name: default.tbl3 diff --git a/ql/src/test/results/clientpositive/bucketcontext_1.q.out b/ql/src/test/results/clientpositive/bucketcontext_1.q.out index 35b6ae89ca..7a6c0121dd 100644 --- a/ql/src/test/results/clientpositive/bucketcontext_1.q.out +++ b/ql/src/test/results/clientpositive/bucketcontext_1.q.out @@ -140,7 +140,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n14 @@ -149,11 +148,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_small_n14 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -171,10 +168,8 @@ STAGE PLANS: name default.bucket_small_n14 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n14 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n14 name: default.bucket_small_n14 @@ -256,7 +251,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n14 @@ -265,11 +259,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string 
rawDataSize 0 - serialization.ddl struct bucket_big_n14 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -287,10 +279,8 @@ STAGE PLANS: name default.bucket_big_n14 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n14 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n14 name: default.bucket_big_n14 @@ -306,7 +296,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n14 @@ -315,11 +304,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_big_n14 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -337,10 +324,8 @@ STAGE PLANS: name default.bucket_big_n14 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n14 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n14 name: default.bucket_big_n14 @@ -474,7 +459,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n14 @@ -483,11 +467,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_big_n14 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -505,10 +487,8 @@ STAGE PLANS: name default.bucket_big_n14 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n14 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n14 name: default.bucket_big_n14 @@ -524,7 +504,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n14 @@ -533,11 +512,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_big_n14 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -555,10 +532,8 @@ 
STAGE PLANS: name default.bucket_big_n14 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n14 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n14 name: default.bucket_big_n14 diff --git a/ql/src/test/results/clientpositive/bucketcontext_2.q.out b/ql/src/test/results/clientpositive/bucketcontext_2.q.out index 442e93b778..b1a6b8ff6e 100644 --- a/ql/src/test/results/clientpositive/bucketcontext_2.q.out +++ b/ql/src/test/results/clientpositive/bucketcontext_2.q.out @@ -124,7 +124,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n8 @@ -133,11 +132,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_small_n8 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -155,10 +152,8 @@ STAGE PLANS: name default.bucket_small_n8 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n8 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n8 name: default.bucket_small_n8 @@ -240,7 +235,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n8 @@ -249,11 +243,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_big_n8 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -271,10 +263,8 @@ STAGE PLANS: name default.bucket_big_n8 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n8 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n8 name: default.bucket_big_n8 @@ -290,7 +280,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n8 @@ -299,11 +288,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_big_n8 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -321,10 +308,8 @@ STAGE PLANS: name default.bucket_big_n8 partition_columns ds 
partition_columns.types string - serialization.ddl struct bucket_big_n8 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n8 name: default.bucket_big_n8 @@ -458,7 +443,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n8 @@ -467,11 +451,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_big_n8 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -489,10 +471,8 @@ STAGE PLANS: name default.bucket_big_n8 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n8 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n8 name: default.bucket_big_n8 @@ -508,7 +488,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n8 @@ -517,11 +496,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_big_n8 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -539,10 +516,8 @@ STAGE PLANS: name default.bucket_big_n8 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n8 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n8 name: default.bucket_big_n8 diff --git a/ql/src/test/results/clientpositive/bucketcontext_3.q.out b/ql/src/test/results/clientpositive/bucketcontext_3.q.out index 8fa490d68d..70fc0bd1ee 100644 --- a/ql/src/test/results/clientpositive/bucketcontext_3.q.out +++ b/ql/src/test/results/clientpositive/bucketcontext_3.q.out @@ -124,7 +124,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n4 @@ -133,11 +132,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_small_n4 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -155,10 +152,8 @@ STAGE PLANS: name default.bucket_small_n4 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n4 { string 
key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n4 name: default.bucket_small_n4 @@ -172,7 +167,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n4 @@ -181,11 +175,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_small_n4 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -203,10 +195,8 @@ STAGE PLANS: name default.bucket_small_n4 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n4 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n4 name: default.bucket_small_n4 @@ -288,7 +278,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n4 @@ -297,11 +286,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_big_n4 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -319,10 +306,8 @@ STAGE PLANS: name default.bucket_big_n4 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n4 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n4 name: default.bucket_big_n4 @@ -455,7 +440,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n4 @@ -464,11 +448,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_big_n4 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -486,10 +468,8 @@ STAGE PLANS: name default.bucket_big_n4 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n4 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n4 name: default.bucket_big_n4 diff --git a/ql/src/test/results/clientpositive/bucketcontext_4.q.out 
b/ql/src/test/results/clientpositive/bucketcontext_4.q.out index c0f1017536..afd597e298 100644 --- a/ql/src/test/results/clientpositive/bucketcontext_4.q.out +++ b/ql/src/test/results/clientpositive/bucketcontext_4.q.out @@ -140,7 +140,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small @@ -149,11 +148,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -171,10 +168,8 @@ STAGE PLANS: name default.bucket_small partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small name: default.bucket_small @@ -188,7 +183,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small @@ -197,11 +191,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -219,10 +211,8 @@ STAGE PLANS: name default.bucket_small partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small name: default.bucket_small @@ -304,7 +294,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big @@ -313,11 +302,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -335,10 +322,8 @@ STAGE PLANS: name default.bucket_big partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big name: default.bucket_big @@ -471,7 +456,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name 
default.bucket_big @@ -480,11 +464,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -502,10 +484,8 @@ STAGE PLANS: name default.bucket_big partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big name: default.bucket_big diff --git a/ql/src/test/results/clientpositive/bucketcontext_5.q.out b/ql/src/test/results/clientpositive/bucketcontext_5.q.out index ac4496cf2f..a4d8cb5a2f 100644 --- a/ql/src/test/results/clientpositive/bucketcontext_5.q.out +++ b/ql/src/test/results/clientpositive/bucketcontext_5.q.out @@ -166,18 +166,15 @@ STAGE PLANS: bucketing_version 1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n13 numFiles 2 numRows 0 rawDataSize 0 - serialization.ddl struct bucket_big_n13 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -196,11 +193,9 @@ STAGE PLANS: numFiles 2 numRows 0 rawDataSize 0 - serialization.ddl struct bucket_big_n13 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n13 name: default.bucket_big_n13 @@ -321,18 +316,15 @@ STAGE PLANS: bucketing_version 1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n13 numFiles 2 numRows 0 rawDataSize 0 - serialization.ddl struct bucket_big_n13 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -351,11 +343,9 @@ STAGE PLANS: numFiles 2 numRows 0 rawDataSize 0 - serialization.ddl struct bucket_big_n13 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n13 name: default.bucket_big_n13 diff --git a/ql/src/test/results/clientpositive/bucketcontext_6.q.out b/ql/src/test/results/clientpositive/bucketcontext_6.q.out index 39ed5cce50..f7b3bd661c 100644 --- a/ql/src/test/results/clientpositive/bucketcontext_6.q.out +++ b/ql/src/test/results/clientpositive/bucketcontext_6.q.out @@ -188,7 +188,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### 
name default.bucket_big_n7 @@ -197,11 +196,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_big_n7 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -219,10 +216,8 @@ STAGE PLANS: name default.bucket_big_n7 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n7 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n7 name: default.bucket_big_n7 @@ -238,7 +233,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n7 @@ -247,11 +241,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_big_n7 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -269,10 +261,8 @@ STAGE PLANS: name default.bucket_big_n7 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n7 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n7 name: default.bucket_big_n7 @@ -402,7 +392,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n7 @@ -411,11 +400,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_big_n7 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -433,10 +420,8 @@ STAGE PLANS: name default.bucket_big_n7 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n7 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n7 name: default.bucket_big_n7 @@ -452,7 +437,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n7 @@ -461,11 +445,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_big_n7 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -483,10 +465,8 @@ STAGE PLANS: name default.bucket_big_n7 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n7 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n7 name: default.bucket_big_n7 diff --git a/ql/src/test/results/clientpositive/bucketcontext_7.q.out b/ql/src/test/results/clientpositive/bucketcontext_7.q.out index eb64514c9e..a860f4d328 100644 --- a/ql/src/test/results/clientpositive/bucketcontext_7.q.out +++ b/ql/src/test/results/clientpositive/bucketcontext_7.q.out @@ -159,7 +159,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n2 @@ -168,11 +167,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_small_n2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -190,10 +187,8 @@ STAGE PLANS: name default.bucket_small_n2 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n2 name: default.bucket_small_n2 @@ -207,7 +202,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n2 @@ -216,11 +210,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_small_n2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -238,10 +230,8 @@ STAGE PLANS: name default.bucket_small_n2 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n2 name: default.bucket_small_n2 @@ -323,7 +313,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n2 @@ -332,11 +321,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_big_n2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: 
org.apache.hadoop.mapred.TextInputFormat @@ -354,10 +341,8 @@ STAGE PLANS: name default.bucket_big_n2 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n2 name: default.bucket_big_n2 @@ -373,7 +358,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n2 @@ -382,11 +366,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_big_n2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -404,10 +386,8 @@ STAGE PLANS: name default.bucket_big_n2 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n2 name: default.bucket_big_n2 @@ -545,7 +525,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n2 @@ -554,11 +533,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_big_n2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -576,10 +553,8 @@ STAGE PLANS: name default.bucket_big_n2 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n2 name: default.bucket_big_n2 @@ -595,7 +570,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n2 @@ -604,11 +578,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_big_n2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -626,10 +598,8 @@ STAGE PLANS: name default.bucket_big_n2 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n2 name: default.bucket_big_n2 diff --git a/ql/src/test/results/clientpositive/bucketcontext_8.q.out b/ql/src/test/results/clientpositive/bucketcontext_8.q.out index 245b9618ea..8e73aa3d9f 100644 --- a/ql/src/test/results/clientpositive/bucketcontext_8.q.out +++ b/ql/src/test/results/clientpositive/bucketcontext_8.q.out @@ -159,7 +159,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n10 @@ -168,11 +167,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_small_n10 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -190,10 +187,8 @@ STAGE PLANS: name default.bucket_small_n10 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n10 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n10 name: default.bucket_small_n10 @@ -207,7 +202,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n10 @@ -216,11 +210,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_small_n10 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -238,10 +230,8 @@ STAGE PLANS: name default.bucket_small_n10 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n10 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n10 name: default.bucket_small_n10 @@ -323,7 +313,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n10 @@ -332,11 +321,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_big_n10 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -354,10 +341,8 @@ STAGE PLANS: name default.bucket_big_n10 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n10 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n10 name: default.bucket_big_n10 @@ -373,7 +358,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n10 @@ -382,11 +366,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_big_n10 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -404,10 +386,8 @@ STAGE PLANS: name default.bucket_big_n10 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n10 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n10 name: default.bucket_big_n10 @@ -545,7 +525,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n10 @@ -554,11 +533,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_big_n10 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -576,10 +553,8 @@ STAGE PLANS: name default.bucket_big_n10 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n10 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n10 name: default.bucket_big_n10 @@ -595,7 +570,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n10 @@ -604,11 +578,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct bucket_big_n10 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -626,10 +598,8 @@ STAGE PLANS: name default.bucket_big_n10 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n10 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n10 name: default.bucket_big_n10 diff --git a/ql/src/test/results/clientpositive/bucketmapjoin10.q.out b/ql/src/test/results/clientpositive/bucketmapjoin10.q.out index c4d607054f..8bad36aba7 100644 --- a/ql/src/test/results/clientpositive/bucketmapjoin10.q.out +++ 
b/ql/src/test/results/clientpositive/bucketmapjoin10.q.out @@ -173,7 +173,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n13 @@ -182,11 +181,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2_n13 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -203,10 +200,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n13 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n13 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n13 name: default.srcbucket_mapjoin_part_2_n13 @@ -220,7 +215,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n13 @@ -229,11 +223,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2_n13 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -250,10 +242,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n13 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n13 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n13 name: default.srcbucket_mapjoin_part_2_n13 @@ -326,7 +316,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1_n6 @@ -335,11 +324,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_1_n6 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -356,10 +343,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_1_n6 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_1_n6 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1_n6 name: default.srcbucket_mapjoin_part_1_n6 @@ -375,7 +360,6 @@ STAGE PLANS: bucket_field_name key 
column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1_n6 @@ -384,11 +368,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_1_n6 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -405,10 +387,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_1_n6 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_1_n6 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1_n6 name: default.srcbucket_mapjoin_part_1_n6 diff --git a/ql/src/test/results/clientpositive/bucketmapjoin11.q.out b/ql/src/test/results/clientpositive/bucketmapjoin11.q.out index 16114c9549..66591fada7 100644 --- a/ql/src/test/results/clientpositive/bucketmapjoin11.q.out +++ b/ql/src/test/results/clientpositive/bucketmapjoin11.q.out @@ -181,7 +181,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n6 @@ -190,11 +189,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2_n6 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -211,10 +208,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n6 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n6 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n6 name: default.srcbucket_mapjoin_part_2_n6 @@ -228,7 +223,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n6 @@ -237,11 +231,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2_n6 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -258,10 +250,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n6 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n6 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n6 name: default.srcbucket_mapjoin_part_2_n6 @@ -342,7 +332,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1_n2 @@ -351,11 +340,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -372,10 +359,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_1_n2 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1_n2 name: default.srcbucket_mapjoin_part_1_n2 @@ -391,7 +376,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1_n2 @@ -400,11 +384,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -421,10 +403,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_1_n2 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1_n2 name: default.srcbucket_mapjoin_part_1_n2 @@ -539,7 +519,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n6 @@ -548,11 +527,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2_n6 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -569,10 +546,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n6 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n6 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n6 name: default.srcbucket_mapjoin_part_2_n6 @@ 
-586,7 +561,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n6 @@ -595,11 +569,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2_n6 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -616,10 +588,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n6 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n6 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n6 name: default.srcbucket_mapjoin_part_2_n6 @@ -700,7 +670,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1_n2 @@ -709,11 +678,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -730,10 +697,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_1_n2 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1_n2 name: default.srcbucket_mapjoin_part_1_n2 @@ -749,7 +714,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1_n2 @@ -758,11 +722,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -779,10 +741,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_1_n2 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1_n2 name: default.srcbucket_mapjoin_part_1_n2 diff --git a/ql/src/test/results/clientpositive/bucketmapjoin12.q.out b/ql/src/test/results/clientpositive/bucketmapjoin12.q.out index 
5c453948cb..207c846d54 100644 --- a/ql/src/test/results/clientpositive/bucketmapjoin12.q.out +++ b/ql/src/test/results/clientpositive/bucketmapjoin12.q.out @@ -138,7 +138,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n0 @@ -147,17 +146,14 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2_n0 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -167,10 +163,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n0 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n0 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n0 name: default.srcbucket_mapjoin_part_2_n0 @@ -251,7 +245,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 @@ -260,11 +253,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -281,10 +272,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_1 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 name: default.srcbucket_mapjoin_part_1 @@ -386,10 +375,8 @@ STAGE PLANS: partition values: part 1 properties: - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_3 @@ -398,11 +385,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -419,10 +404,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_3 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_3 { i32 key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_3 name: default.srcbucket_mapjoin_part_3 @@ -495,7 +478,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 @@ -504,11 +486,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -525,10 +505,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_1 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 name: default.srcbucket_mapjoin_part_1 diff --git a/ql/src/test/results/clientpositive/bucketmapjoin13.q.out b/ql/src/test/results/clientpositive/bucketmapjoin13.q.out index 693377f863..f46f97b5bd 100644 --- a/ql/src/test/results/clientpositive/bucketmapjoin13.q.out +++ b/ql/src/test/results/clientpositive/bucketmapjoin13.q.out @@ -103,12 +103,10 @@ STAGE PLANS: partition values: part 1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n18 @@ -117,11 +115,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 5312 - serialization.ddl struct srcbucket_mapjoin_part_2_n18 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -138,10 +134,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n18 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n18 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n18 name: default.srcbucket_mapjoin_part_2_n18 @@ -210,12 +204,10 @@ STAGE PLANS: partition values: part 1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name value column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1_n8 @@ -224,11 +216,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 5312 - serialization.ddl struct srcbucket_mapjoin_part_1_n8 { i32 key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -245,10 +235,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_1_n8 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_1_n8 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1_n8 name: default.srcbucket_mapjoin_part_1_n8 @@ -260,12 +248,10 @@ STAGE PLANS: partition values: part 2 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1_n8 @@ -274,11 +260,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 5312 - serialization.ddl struct srcbucket_mapjoin_part_1_n8 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -295,10 +279,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_1_n8 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_1_n8 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1_n8 name: default.srcbucket_mapjoin_part_1_n8 @@ -403,12 +385,10 @@ STAGE PLANS: partition values: part 1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n18 @@ -417,11 +397,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 5312 - serialization.ddl struct srcbucket_mapjoin_part_2_n18 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -438,10 +416,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n18 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n18 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n18 name: default.srcbucket_mapjoin_part_2_n18 @@ -518,12 +494,10 @@ STAGE PLANS: partition values: part 2 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types 
int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1_n8 @@ -532,11 +506,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 5312 - serialization.ddl struct srcbucket_mapjoin_part_1_n8 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -553,10 +525,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_1_n8 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_1_n8 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1_n8 name: default.srcbucket_mapjoin_part_1_n8 @@ -666,12 +636,10 @@ STAGE PLANS: partition values: part 1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n18 @@ -680,11 +648,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 5312 - serialization.ddl struct srcbucket_mapjoin_part_2_n18 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -701,10 +667,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n18 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n18 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n18 name: default.srcbucket_mapjoin_part_2_n18 @@ -781,12 +745,10 @@ STAGE PLANS: partition values: part 2 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1_n8 @@ -795,11 +757,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 5312 - serialization.ddl struct srcbucket_mapjoin_part_1_n8 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -816,10 +776,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_1_n8 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_1_n8 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1_n8 name: 
default.srcbucket_mapjoin_part_1_n8 @@ -929,12 +887,10 @@ STAGE PLANS: partition values: part 1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n18 @@ -943,11 +899,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 5312 - serialization.ddl struct srcbucket_mapjoin_part_2_n18 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -964,10 +918,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n18 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n18 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n18 name: default.srcbucket_mapjoin_part_2_n18 @@ -1044,12 +996,10 @@ STAGE PLANS: partition values: part 2 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1_n8 @@ -1058,11 +1008,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 5312 - serialization.ddl struct srcbucket_mapjoin_part_1_n8 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1079,10 +1027,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_1_n8 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_1_n8 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1_n8 name: default.srcbucket_mapjoin_part_1_n8 diff --git a/ql/src/test/results/clientpositive/bucketmapjoin5.q.out b/ql/src/test/results/clientpositive/bucketmapjoin5.q.out index b0e2931570..c5520a5bb6 100644 --- a/ql/src/test/results/clientpositive/bucketmapjoin5.q.out +++ b/ql/src/test/results/clientpositive/bucketmapjoin5.q.out @@ -264,8 +264,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -276,11 +274,9 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A 
masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result TotalFiles: 1 @@ -322,7 +318,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_n0 @@ -331,11 +326,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_n0 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -352,10 +345,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_n0 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_n0 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_n0 name: default.srcbucket_mapjoin_part_n0 @@ -371,7 +362,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_n0 @@ -380,11 +370,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_n0 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -401,10 +389,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_n0 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_n0 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_n0 name: default.srcbucket_mapjoin_part_n0 @@ -461,8 +447,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -473,11 +457,9 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result @@ -506,8 +488,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -518,11 +498,9 @@ STAGE 
PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result TotalFiles: 1 @@ -537,8 +515,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -549,18 +525,14 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -571,11 +543,9 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result name: default.bucketmapjoin_tmp_result @@ -597,8 +567,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -609,11 +577,9 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result TotalFiles: 1 @@ -628,8 +594,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -640,18 +604,14 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -662,11 +622,9 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result name: default.bucketmapjoin_tmp_result @@ -886,8 +844,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -898,11 +854,9 @@ STAGE PLANS: numFiles 1 numRows 928 rawDataSize 17038 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 17966 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result TotalFiles: 1 @@ -944,7 +898,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 @@ -953,11 +906,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 3062 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -974,10 +925,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 name: default.srcbucket_mapjoin_part_2 @@ -993,7 +942,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 @@ -1002,11 +950,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 3062 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1023,10 +969,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 name: default.srcbucket_mapjoin_part_2 @@ -1083,8 +1027,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -1095,11 +1037,9 @@ STAGE PLANS: numFiles 1 numRows 928 rawDataSize 17038 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 17966 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result @@ -1128,8 +1068,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -1140,11 +1078,9 @@ STAGE PLANS: numFiles 1 numRows 928 rawDataSize 17038 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 17966 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result TotalFiles: 1 @@ -1159,8 +1095,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -1171,18 +1105,14 @@ STAGE PLANS: numFiles 1 numRows 928 rawDataSize 17038 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 17966 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -1193,11 +1123,9 @@ STAGE PLANS: numFiles 1 numRows 928 rawDataSize 17038 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 17966 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result name: default.bucketmapjoin_tmp_result @@ -1219,8 +1147,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - 
COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -1231,11 +1157,9 @@ STAGE PLANS: numFiles 1 numRows 928 rawDataSize 17038 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 17966 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result TotalFiles: 1 @@ -1250,8 +1174,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -1262,18 +1184,14 @@ STAGE PLANS: numFiles 1 numRows 928 rawDataSize 17038 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 17966 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -1284,11 +1202,9 @@ STAGE PLANS: numFiles 1 numRows 928 rawDataSize 17038 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 17966 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result name: default.bucketmapjoin_tmp_result diff --git a/ql/src/test/results/clientpositive/bucketmapjoin8.q.out b/ql/src/test/results/clientpositive/bucketmapjoin8.q.out index e1b658e1b4..190784ff1c 100644 --- a/ql/src/test/results/clientpositive/bucketmapjoin8.q.out +++ b/ql/src/test/results/clientpositive/bucketmapjoin8.q.out @@ -103,7 +103,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n4 @@ -112,11 +111,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2_n4 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -133,10 +130,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n4 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n4 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n4 name: 
default.srcbucket_mapjoin_part_2_n4 @@ -217,7 +212,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1_n1 @@ -226,11 +220,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -247,10 +239,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_1_n1 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1_n1 name: default.srcbucket_mapjoin_part_1_n1 @@ -364,7 +354,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n4 @@ -373,11 +362,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2_n4 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -394,10 +381,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n4 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n4 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n4 name: default.srcbucket_mapjoin_part_2_n4 @@ -478,7 +463,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1_n1 @@ -487,11 +471,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -508,10 +490,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_1_n1 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1_n1 name: default.srcbucket_mapjoin_part_1_n1 diff --git a/ql/src/test/results/clientpositive/bucketmapjoin9.q.out 
b/ql/src/test/results/clientpositive/bucketmapjoin9.q.out index 709c780fd1..c5b45113c9 100644 --- a/ql/src/test/results/clientpositive/bucketmapjoin9.q.out +++ b/ql/src/test/results/clientpositive/bucketmapjoin9.q.out @@ -111,7 +111,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n12 @@ -120,11 +119,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2_n12 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -141,10 +138,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n12 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n12 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n12 name: default.srcbucket_mapjoin_part_2_n12 @@ -217,7 +212,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1_n5 @@ -226,11 +220,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_1_n5 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -247,10 +239,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_1_n5 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_1_n5 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1_n5 name: default.srcbucket_mapjoin_part_1_n5 @@ -397,7 +387,6 @@ STAGE PLANS: bucket_field_name value column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n12 @@ -406,11 +395,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2_n12 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -427,10 +414,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n12 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n12 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n12 name: default.srcbucket_mapjoin_part_2_n12 @@ -503,7 +488,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1_n5 @@ -512,11 +496,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_1_n5 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -533,10 +515,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_1_n5 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_1_n5 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1_n5 name: default.srcbucket_mapjoin_part_1_n5 diff --git a/ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out b/ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out index c90fa595dc..1ef6018041 100644 --- a/ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out +++ b/ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out @@ -112,7 +112,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_n10 @@ -121,11 +120,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_n10 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -142,10 +139,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_n10 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_n10 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_n10 name: default.srcbucket_mapjoin_part_n10 @@ -203,8 +198,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -215,11 +208,9 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n4 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n4 TotalFiles: 1 @@ -260,18 +251,15 @@ STAGE PLANS: 
bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_n10 numFiles 2 numRows 0 rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_n10 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -289,11 +277,9 @@ STAGE PLANS: numFiles 2 numRows 0 rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_n10 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_n10 name: default.srcbucket_mapjoin_n10 @@ -349,8 +335,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -361,11 +345,9 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n4 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n4 @@ -394,8 +376,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -406,11 +386,9 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n4 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n4 TotalFiles: 1 @@ -425,8 +403,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -437,18 +413,14 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n4 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 
bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -459,11 +431,9 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n4 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n4 name: default.bucketmapjoin_tmp_result_n4 @@ -485,8 +455,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -497,11 +465,9 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n4 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n4 TotalFiles: 1 @@ -516,8 +482,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -528,18 +492,14 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n4 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -550,11 +510,9 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n4 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n4 name: default.bucketmapjoin_tmp_result_n4 diff --git a/ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out b/ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out index 1adac2520c..948b0bd291 100644 --- a/ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out +++ b/ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out @@ -123,7 +123,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n7 @@ -132,11 +131,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2_n7 { i32 key, 
string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 3062 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -153,10 +150,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n7 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n7 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n7 name: default.srcbucket_mapjoin_part_2_n7 @@ -170,7 +165,6 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n7 @@ -179,11 +173,9 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2_n7 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 3062 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -200,10 +192,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n7 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n7 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n7 name: default.srcbucket_mapjoin_part_2_n7 @@ -269,8 +259,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -281,11 +269,9 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n3 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n3 TotalFiles: 1 @@ -326,18 +312,15 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_n5 numFiles 2 numRows 0 rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_n5 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -355,11 +338,9 @@ STAGE PLANS: numFiles 2 numRows 0 rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_n5 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2750 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_n5 name: default.srcbucket_mapjoin_n5 @@ -415,8 +396,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -427,11 +406,9 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n3 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n3 @@ -460,8 +437,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -472,11 +447,9 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n3 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n3 TotalFiles: 1 @@ -491,8 +464,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -503,18 +474,14 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n3 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -525,11 +492,9 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n3 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n3 name: default.bucketmapjoin_tmp_result_n3 @@ -551,8 +516,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 
column.name.delimiter , columns key,value1,value2 @@ -563,11 +526,9 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n3 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n3 TotalFiles: 1 @@ -582,8 +543,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -594,18 +553,14 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n3 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -616,11 +571,9 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n3 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n3 name: default.bucketmapjoin_tmp_result_n3 diff --git a/ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out b/ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out index 2958ae11ed..33485ee089 100644 --- a/ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out +++ b/ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out @@ -254,18 +254,15 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.test1_n11 numFiles 3 numRows 0 rawDataSize 0 - serialization.ddl struct test1_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -284,11 +281,9 @@ STAGE PLANS: numFiles 3 numRows 0 rawDataSize 0 - serialization.ddl struct test1_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test1_n11 name: default.test1_n11 @@ -413,18 +408,15 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.test2_n7 numFiles 3 numRows 0 rawDataSize 0 - serialization.ddl struct test2_n7 { string key, 
string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -443,11 +435,9 @@ STAGE PLANS: numFiles 3 numRows 0 rawDataSize 0 - serialization.ddl struct test2_n7 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2_n7 name: default.test2_n7 @@ -564,18 +554,15 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.test1_n11 numFiles 3 numRows 0 rawDataSize 0 - serialization.ddl struct test1_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -594,11 +581,9 @@ STAGE PLANS: numFiles 3 numRows 0 rawDataSize 0 - serialization.ddl struct test1_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test1_n11 name: default.test1_n11 @@ -717,18 +702,15 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.test1_n11 numFiles 3 numRows 0 rawDataSize 0 - serialization.ddl struct test1_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -747,11 +729,9 @@ STAGE PLANS: numFiles 3 numRows 0 rawDataSize 0 - serialization.ddl struct test1_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test1_n11 name: default.test1_n11 @@ -870,18 +850,15 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.test1_n11 numFiles 3 numRows 0 rawDataSize 0 - serialization.ddl struct test1_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -900,11 +877,9 @@ STAGE PLANS: numFiles 3 numRows 0 rawDataSize 0 - serialization.ddl struct test1_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test1_n11 name: default.test1_n11 @@ -1023,18 +998,15 @@ STAGE PLANS: bucketing_version 2 
column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.test1_n11 numFiles 3 numRows 0 rawDataSize 0 - serialization.ddl struct test1_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1053,11 +1025,9 @@ STAGE PLANS: numFiles 3 numRows 0 rawDataSize 0 - serialization.ddl struct test1_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test1_n11 name: default.test1_n11 @@ -1176,18 +1146,15 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.test2_n7 numFiles 3 numRows 0 rawDataSize 0 - serialization.ddl struct test2_n7 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1206,11 +1173,9 @@ STAGE PLANS: numFiles 3 numRows 0 rawDataSize 0 - serialization.ddl struct test2_n7 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2_n7 name: default.test2_n7 @@ -1329,18 +1294,15 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.test2_n7 numFiles 3 numRows 0 rawDataSize 0 - serialization.ddl struct test2_n7 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1359,11 +1321,9 @@ STAGE PLANS: numFiles 3 numRows 0 rawDataSize 0 - serialization.ddl struct test2_n7 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2_n7 name: default.test2_n7 @@ -1482,18 +1442,15 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.test3 numFiles 3 numRows 0 rawDataSize 0 - serialization.ddl struct test3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1512,11 +1469,9 @@ STAGE PLANS: numFiles 3 numRows 0 rawDataSize 0 - serialization.ddl struct test3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe 
totalSize 4200 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test3 name: default.test3 diff --git a/ql/src/test/results/clientpositive/encrypted/encryption_join_unencrypted_tbl.q.out b/ql/src/test/results/clientpositive/encrypted/encryption_join_unencrypted_tbl.q.out index d6115d84be..894750acbe 100644 --- a/ql/src/test/results/clientpositive/encrypted/encryption_join_unencrypted_tbl.q.out +++ b/ql/src/test/results/clientpositive/encrypted/encryption_join_unencrypted_tbl.q.out @@ -615,31 +615,21 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### location hdfs://### HDFS PATH ### name default.encrypted_table - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct encrypted_table { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -648,14 +638,8 @@ STAGE PLANS: #### A masked pattern was here #### location hdfs://### HDFS PATH ### name default.encrypted_table - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct encrypted_table { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.encrypted_table name: default.encrypted_table @@ -665,31 +649,21 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### location hdfs://### HDFS PATH ### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -698,14 +672,8 @@ STAGE PLANS: #### A masked pattern was here #### location hdfs://### HDFS PATH ### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src diff --git a/ql/src/test/results/clientpositive/join_map_ppr.q.out b/ql/src/test/results/clientpositive/join_map_ppr.q.out index e3d8212352..c9176acd0f 100644 --- a/ql/src/test/results/clientpositive/join_map_ppr.q.out +++ b/ql/src/test/results/clientpositive/join_map_ppr.q.out @@ -124,8 +124,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -136,11 +134,9 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct dest_j1_n4 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1_n4 TotalFiles: 1 @@ -179,11 +175,8 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart @@ -192,17 +185,14 @@ STAGE PLANS: partition_columns ds/hr partition_columns.types string:string rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -212,10 +202,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -271,8 +259,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -283,11 +269,9 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct dest_j1_n4 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1_n4 @@ -316,8 +300,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -328,11 +310,9 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct dest_j1_n4 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1_n4 TotalFiles: 1 @@ -347,8 +327,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -359,18 +337,14 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct dest_j1_n4 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -381,11 +355,9 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct dest_j1_n4 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1_n4 name: default.dest_j1_n4 @@ -407,8 +379,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -419,11 +389,9 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct dest_j1_n4 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1_n4 TotalFiles: 1 @@ -438,8 +406,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -450,18 +416,14 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct dest_j1_n4 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -472,11 +434,9 @@ STAGE PLANS: numFiles 0 numRows 0 rawDataSize 0 - serialization.ddl struct dest_j1_n4 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1_n4 name: default.dest_j1_n4 @@ -783,8 +743,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -795,11 +753,9 @@ STAGE PLANS: numFiles 1 numRows 107 rawDataSize 2018 - serialization.ddl struct dest_j1_n4 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2125 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1_n4 TotalFiles: 1 @@ -838,11 +794,8 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart @@ -851,17 +804,14 @@ STAGE PLANS: partition_columns ds/hr partition_columns.types string:string rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -871,10 +821,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -930,8 +878,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -942,11 +888,9 @@ STAGE PLANS: numFiles 1 numRows 107 rawDataSize 2018 - serialization.ddl struct dest_j1_n4 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2125 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1_n4 @@ -975,8 +919,6 @@ STAGE PLANS: input 
format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -987,11 +929,9 @@ STAGE PLANS: numFiles 1 numRows 107 rawDataSize 2018 - serialization.ddl struct dest_j1_n4 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2125 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1_n4 TotalFiles: 1 @@ -1006,8 +946,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -1018,18 +956,14 @@ STAGE PLANS: numFiles 1 numRows 107 rawDataSize 2018 - serialization.ddl struct dest_j1_n4 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2125 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -1040,11 +974,9 @@ STAGE PLANS: numFiles 1 numRows 107 rawDataSize 2018 - serialization.ddl struct dest_j1_n4 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2125 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1_n4 name: default.dest_j1_n4 @@ -1066,8 +998,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -1078,11 +1008,9 @@ STAGE PLANS: numFiles 1 numRows 107 rawDataSize 2018 - serialization.ddl struct dest_j1_n4 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2125 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1_n4 TotalFiles: 1 @@ -1097,8 +1025,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -1109,18 +1035,14 @@ STAGE PLANS: numFiles 1 numRows 107 rawDataSize 2018 - serialization.ddl struct dest_j1_n4 { string key, string value, string val2} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2125 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -1131,11 +1053,9 @@ STAGE PLANS: numFiles 1 numRows 107 rawDataSize 2018 - serialization.ddl struct dest_j1_n4 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2125 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1_n4 name: default.dest_j1_n4 diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_8.q.out b/ql/src/test/results/clientpositive/list_bucket_dml_8.q.out index 985195850a..f7158b83d7 100644 --- a/ql/src/test/results/clientpositive/list_bucket_dml_8.q.out +++ b/ql/src/test/results/clientpositive/list_bucket_dml_8.q.out @@ -63,7 +63,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -73,10 +72,8 @@ STAGE PLANS: name default.list_bucketing_dynamic_part_n2 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_dynamic_part_n2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_dynamic_part_n2 TotalFiles: 1 @@ -116,11 +113,8 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart @@ -129,17 +123,14 @@ STAGE PLANS: partition_columns ds/hr partition_columns.types string:string rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -149,10 +140,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -165,11 +154,8 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types 
string:string #### A masked pattern was here #### name default.srcpart @@ -178,17 +164,14 @@ STAGE PLANS: partition_columns ds/hr partition_columns.types string:string rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -198,10 +181,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -257,7 +238,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -267,10 +247,8 @@ STAGE PLANS: name default.list_bucketing_dynamic_part_n2 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_dynamic_part_n2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_dynamic_part_n2 @@ -542,11 +520,8 @@ STAGE PLANS: ds 2008-04-08 hr a1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.list_bucketing_dynamic_part_n2 @@ -555,17 +530,14 @@ STAGE PLANS: partition_columns ds/hr partition_columns.types string:string rawDataSize 136 - serialization.ddl struct list_bucketing_dynamic_part_n2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe totalSize 310 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -575,10 +547,8 @@ STAGE PLANS: name default.list_bucketing_dynamic_part_n2 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_dynamic_part_n2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_dynamic_part_n2 name: default.list_bucketing_dynamic_part_n2 @@ -591,10 +561,8 @@ STAGE PLANS: ds 2008-04-08 hr b1 properties: - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.list_bucketing_dynamic_part_n2 @@ -603,17 +571,14 @@ STAGE PLANS: 
partition_columns ds/hr partition_columns.types string:string rawDataSize 9488 - serialization.ddl struct list_bucketing_dynamic_part_n2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe totalSize 10586 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -623,10 +588,8 @@ STAGE PLANS: name default.list_bucketing_dynamic_part_n2 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_dynamic_part_n2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_dynamic_part_n2 name: default.list_bucketing_dynamic_part_n2 diff --git a/ql/src/test/results/clientpositive/llap/autoColumnStats_5a.q.out b/ql/src/test/results/clientpositive/llap/autoColumnStats_5a.q.out index 24b40f0632..e92048ccb3 100644 --- a/ql/src/test/results/clientpositive/llap/autoColumnStats_5a.q.out +++ b/ql/src/test/results/clientpositive/llap/autoColumnStats_5a.q.out @@ -62,7 +62,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns a,b @@ -72,10 +71,8 @@ STAGE PLANS: name default.partitioned1 partition_columns part partition_columns.types int - serialization.ddl struct partitioned1 { i32 a, string b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.partitioned1 TotalFiles: 1 @@ -118,11 +115,9 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns - columns.comments columns.types #### A masked pattern was here #### name _dummy_database._dummy_table - serialization.ddl struct _dummy_table { } serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe serde: org.apache.hadoop.hive.serde2.NullStructSerDe @@ -130,7 +125,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.NullRowsInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns @@ -138,7 +132,6 @@ STAGE PLANS: columns.types #### A masked pattern was here #### name _dummy_database._dummy_table - serialization.ddl struct _dummy_table { } serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe serde: org.apache.hadoop.hive.serde2.NullStructSerDe @@ -199,7 +192,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns a,b @@ -209,10 +201,8 @@ STAGE PLANS: name default.partitioned1 partition_columns part partition_columns.types int - serialization.ddl struct partitioned1 { i32 a, string b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.partitioned1 @@ -331,7 +321,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns a,b @@ -341,10 +330,8 @@ STAGE PLANS: name default.partitioned1 partition_columns part partition_columns.types int - serialization.ddl struct partitioned1 { i32 a, string b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.partitioned1 TotalFiles: 1 @@ -387,11 +374,9 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns - columns.comments columns.types #### A masked pattern was here #### name _dummy_database._dummy_table - serialization.ddl struct _dummy_table { } serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe serde: org.apache.hadoop.hive.serde2.NullStructSerDe @@ -399,7 +384,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.NullRowsInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns @@ -407,7 +391,6 @@ STAGE PLANS: columns.types #### A masked pattern was here #### name _dummy_database._dummy_table - serialization.ddl struct _dummy_table { } serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe serde: org.apache.hadoop.hive.serde2.NullStructSerDe @@ -468,7 +451,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns a,b @@ -478,10 +460,8 @@ STAGE PLANS: name default.partitioned1 partition_columns part partition_columns.types int - serialization.ddl struct partitioned1 { i32 a, string b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.partitioned1 diff --git a/ql/src/test/results/clientpositive/llap/autoColumnStats_8.q.out b/ql/src/test/results/clientpositive/llap/autoColumnStats_8.q.out index 3bb474c927..48d1d92078 100644 --- a/ql/src/test/results/clientpositive/llap/autoColumnStats_8.q.out +++ b/ql/src/test/results/clientpositive/llap/autoColumnStats_8.q.out @@ -98,7 +98,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -108,10 +107,8 @@ STAGE PLANS: name default.nzhang_part8 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct nzhang_part8 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.nzhang_part8 TotalFiles: 1 @@ -160,7 +157,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -170,10 +166,8 @@ STAGE 
PLANS: name default.nzhang_part8 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct nzhang_part8 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.nzhang_part8 TotalFiles: 1 @@ -215,30 +209,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -248,10 +232,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -264,30 +246,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -297,10 +269,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -313,30 +283,20 @@ STAGE PLANS: ds 2008-04-09 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - 
serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -346,10 +306,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -362,30 +320,20 @@ STAGE PLANS: ds 2008-04-09 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -395,10 +343,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -499,7 +445,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -509,10 +454,8 @@ STAGE PLANS: name default.nzhang_part8 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct nzhang_part8 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.nzhang_part8 @@ -533,7 +476,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -543,10 +485,8 @@ STAGE PLANS: name default.nzhang_part8 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct nzhang_part8 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: 
default.nzhang_part8 diff --git a/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_1.q.out b/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_1.q.out index 096e225844..7566b3476f 100644 --- a/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_1.q.out +++ b/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_1.q.out @@ -168,20 +168,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n1 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -199,10 +192,8 @@ STAGE PLANS: name default.bucket_small_n1 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n1 name: default.bucket_small_n1 @@ -260,20 +251,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n1 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -291,10 +275,8 @@ STAGE PLANS: name default.bucket_big_n1 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n1 name: default.bucket_big_n1 @@ -309,20 +291,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n1 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -340,10 +315,8 @@ STAGE PLANS: name default.bucket_big_n1 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n1 name: default.bucket_big_n1 @@ -472,20 +445,13 @@ 
STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n1 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -503,10 +469,8 @@ STAGE PLANS: name default.bucket_small_n1 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n1 name: default.bucket_small_n1 @@ -564,20 +528,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n1 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -595,10 +552,8 @@ STAGE PLANS: name default.bucket_big_n1 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n1 name: default.bucket_big_n1 @@ -613,20 +568,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n1 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -644,10 +592,8 @@ STAGE PLANS: name default.bucket_big_n1 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n1 name: default.bucket_big_n1 @@ -776,20 +722,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n1 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n1 { string key, string value} serialization.format 1 
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -807,10 +746,8 @@ STAGE PLANS: name default.bucket_small_n1 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n1 name: default.bucket_small_n1 @@ -868,20 +805,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n1 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -899,10 +829,8 @@ STAGE PLANS: name default.bucket_big_n1 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n1 name: default.bucket_big_n1 @@ -917,20 +845,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n1 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -948,10 +869,8 @@ STAGE PLANS: name default.bucket_big_n1 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n1 name: default.bucket_big_n1 diff --git a/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_11.q.out b/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_11.q.out index 4d428a6040..9902ff3ada 100644 --- a/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_11.q.out +++ b/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_11.q.out @@ -179,20 +179,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n11 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n11 { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -210,10 +203,8 @@ STAGE PLANS: name default.bucket_small_n11 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n11 name: default.bucket_small_n11 @@ -261,20 +252,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n11 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -292,10 +276,8 @@ STAGE PLANS: name default.bucket_big_n11 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n11 name: default.bucket_big_n11 @@ -311,20 +293,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n11 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -342,10 +317,8 @@ STAGE PLANS: name default.bucket_big_n11 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n11 name: default.bucket_big_n11 @@ -501,20 +474,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n11 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -532,10 +498,8 @@ STAGE PLANS: name default.bucket_small_n11 partition_columns ds partition_columns.types string - serialization.ddl struct 
bucket_small_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n11 name: default.bucket_small_n11 @@ -593,20 +557,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n11 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -624,10 +581,8 @@ STAGE PLANS: name default.bucket_big_n11 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n11 name: default.bucket_big_n11 @@ -642,20 +597,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n11 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -673,10 +621,8 @@ STAGE PLANS: name default.bucket_big_n11 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n11 name: default.bucket_big_n11 @@ -794,20 +740,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n11 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -825,10 +764,8 @@ STAGE PLANS: name default.bucket_small_n11 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n11 name: default.bucket_small_n11 @@ -882,20 +819,13 @@ STAGE PLANS: bucket_field_name key 
column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n11 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -913,10 +843,8 @@ STAGE PLANS: name default.bucket_big_n11 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n11 name: default.bucket_big_n11 @@ -931,20 +859,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n11 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -962,10 +883,8 @@ STAGE PLANS: name default.bucket_big_n11 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n11 name: default.bucket_big_n11 @@ -1084,20 +1003,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n11 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1115,10 +1027,8 @@ STAGE PLANS: name default.bucket_small_n11 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n11 name: default.bucket_small_n11 @@ -1168,20 +1078,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n11 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n11 { string key, string value} serialization.format 1 
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1199,10 +1102,8 @@ STAGE PLANS: name default.bucket_big_n11 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n11 name: default.bucket_big_n11 @@ -1217,20 +1118,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n11 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1248,10 +1142,8 @@ STAGE PLANS: name default.bucket_big_n11 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n11 name: default.bucket_big_n11 @@ -1296,20 +1188,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n11 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1327,10 +1212,8 @@ STAGE PLANS: name default.bucket_big_n11 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n11 name: default.bucket_big_n11 @@ -1346,20 +1229,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n11 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1377,10 +1253,8 @@ STAGE PLANS: name default.bucket_big_n11 partition_columns ds partition_columns.types string - 
serialization.ddl struct bucket_big_n11 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n11 name: default.bucket_big_n11 diff --git a/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_12.q.out b/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_12.q.out index cf73803c06..d0d01fe93e 100644 --- a/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_12.q.out +++ b/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_12.q.out @@ -223,20 +223,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_medium - numFiles 3 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_medium { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 170 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -254,10 +247,8 @@ STAGE PLANS: name default.bucket_medium partition_columns ds partition_columns.types string - serialization.ddl struct bucket_medium { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_medium name: default.bucket_medium @@ -293,20 +284,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_medium - numFiles 3 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_medium { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 170 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -324,10 +308,8 @@ STAGE PLANS: name default.bucket_medium partition_columns ds partition_columns.types string - serialization.ddl struct bucket_medium { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_medium name: default.bucket_medium @@ -382,20 +364,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n15 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n15 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -413,10 +388,8 @@ STAGE PLANS: name default.bucket_big_n15 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n15 { 
string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n15 name: default.bucket_big_n15 @@ -431,20 +404,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n15 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n15 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -462,10 +428,8 @@ STAGE PLANS: name default.bucket_big_n15 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n15 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n15 name: default.bucket_big_n15 @@ -514,20 +478,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n15 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n15 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -545,10 +502,8 @@ STAGE PLANS: name default.bucket_small_n15 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n15 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n15 name: default.bucket_small_n15 diff --git a/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_2.q.out b/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_2.q.out index 60cfb52549..fbdf2d6f17 100644 --- a/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_2.q.out +++ b/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_2.q.out @@ -150,20 +150,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n3 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 226 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -181,10 +174,8 @@ STAGE PLANS: name default.bucket_small_n3 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n3 { string key, string 
value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n3 name: default.bucket_small_n3 @@ -242,20 +233,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n3 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -273,10 +257,8 @@ STAGE PLANS: name default.bucket_big_n3 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n3 name: default.bucket_big_n3 @@ -291,20 +273,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n3 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -322,10 +297,8 @@ STAGE PLANS: name default.bucket_big_n3 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n3 name: default.bucket_big_n3 @@ -454,20 +427,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n3 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 226 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -485,10 +451,8 @@ STAGE PLANS: name default.bucket_small_n3 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n3 name: default.bucket_small_n3 @@ -546,20 +510,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments 
columns.types string:string #### A masked pattern was here #### name default.bucket_big_n3 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -577,10 +534,8 @@ STAGE PLANS: name default.bucket_big_n3 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n3 name: default.bucket_big_n3 @@ -595,20 +550,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n3 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -626,10 +574,8 @@ STAGE PLANS: name default.bucket_big_n3 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n3 name: default.bucket_big_n3 diff --git a/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_3.q.out b/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_3.q.out index a9b2f03838..be00f1d230 100644 --- a/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_3.q.out +++ b/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_3.q.out @@ -150,20 +150,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n9 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n9 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -181,10 +174,8 @@ STAGE PLANS: name default.bucket_small_n9 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n9 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n9 name: default.bucket_small_n9 @@ -199,20 +190,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked 
pattern was here #### name default.bucket_small_n9 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n9 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -230,10 +214,8 @@ STAGE PLANS: name default.bucket_small_n9 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n9 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n9 name: default.bucket_small_n9 @@ -292,20 +274,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n9 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n9 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -323,10 +298,8 @@ STAGE PLANS: name default.bucket_big_n9 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n9 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n9 name: default.bucket_big_n9 @@ -454,20 +427,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n9 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n9 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -485,10 +451,8 @@ STAGE PLANS: name default.bucket_small_n9 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n9 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n9 name: default.bucket_small_n9 @@ -503,20 +467,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n9 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n9 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -534,10 +491,8 @@ STAGE PLANS: name default.bucket_small_n9 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n9 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n9 name: default.bucket_small_n9 @@ -596,20 +551,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n9 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n9 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -627,10 +575,8 @@ STAGE PLANS: name default.bucket_big_n9 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n9 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n9 name: default.bucket_big_n9 @@ -758,20 +704,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n9 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n9 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -789,10 +728,8 @@ STAGE PLANS: name default.bucket_small_n9 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n9 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n9 name: default.bucket_small_n9 @@ -807,20 +744,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n9 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n9 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -838,10 +768,8 @@ STAGE PLANS: name default.bucket_small_n9 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n9 { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n9 name: default.bucket_small_n9 @@ -900,20 +828,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n9 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n9 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -931,10 +852,8 @@ STAGE PLANS: name default.bucket_big_n9 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n9 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n9 name: default.bucket_big_n9 diff --git a/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_4.q.out b/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_4.q.out index 6ef466fa20..20937079ef 100644 --- a/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_4.q.out +++ b/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_4.q.out @@ -166,20 +166,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n12 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n12 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 226 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -197,10 +190,8 @@ STAGE PLANS: name default.bucket_small_n12 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n12 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n12 name: default.bucket_small_n12 @@ -215,20 +206,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n12 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n12 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 226 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -246,10 +230,8 @@ STAGE PLANS: name default.bucket_small_n12 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n12 { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n12 name: default.bucket_small_n12 @@ -308,20 +290,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n12 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n12 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -339,10 +314,8 @@ STAGE PLANS: name default.bucket_big_n12 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n12 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n12 name: default.bucket_big_n12 @@ -470,20 +443,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n12 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n12 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 226 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -501,10 +467,8 @@ STAGE PLANS: name default.bucket_small_n12 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n12 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n12 name: default.bucket_small_n12 @@ -519,20 +483,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n12 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n12 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 226 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -550,10 +507,8 @@ STAGE PLANS: name default.bucket_small_n12 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n12 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n12 name: default.bucket_small_n12 @@ -612,20 +567,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types 
string:string #### A masked pattern was here #### name default.bucket_big_n12 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n12 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -643,10 +591,8 @@ STAGE PLANS: name default.bucket_big_n12 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n12 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n12 name: default.bucket_big_n12 @@ -774,20 +720,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n12 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n12 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 226 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -805,10 +744,8 @@ STAGE PLANS: name default.bucket_small_n12 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n12 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n12 name: default.bucket_small_n12 @@ -823,20 +760,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n12 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n12 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 226 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -854,10 +784,8 @@ STAGE PLANS: name default.bucket_small_n12 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n12 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n12 name: default.bucket_small_n12 @@ -916,20 +844,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n12 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n12 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 
-#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -947,10 +868,8 @@ STAGE PLANS: name default.bucket_big_n12 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n12 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n12 name: default.bucket_big_n12 diff --git a/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_5.q.out b/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_5.q.out index f9c09d5c41..0ffe70ed20 100644 --- a/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_5.q.out +++ b/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_5.q.out @@ -125,18 +125,11 @@ STAGE PLANS: bucketing_version 1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n0 - numFiles 4 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucket_small_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 226 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -152,14 +145,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small_n0 - numFiles 4 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucket_small_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 226 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n0 name: default.bucket_small_n0 @@ -217,18 +204,11 @@ STAGE PLANS: bucketing_version 1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n0 - numFiles 2 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucket_big_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -244,14 +224,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big_n0 - numFiles 2 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucket_big_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n0 name: default.bucket_big_n0 @@ -367,18 +341,11 @@ STAGE PLANS: bucketing_version 1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n0 - numFiles 4 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucket_small_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 226 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -394,14 +361,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small_n0 - numFiles 4 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucket_small_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 226 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n0 name: default.bucket_small_n0 @@ -459,18 +420,11 @@ STAGE PLANS: bucketing_version 1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n0 - numFiles 2 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucket_big_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -486,14 +440,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big_n0 - numFiles 2 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucket_big_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n0 name: default.bucket_big_n0 @@ -609,18 +557,11 @@ STAGE PLANS: bucketing_version 1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n0 - numFiles 4 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucket_small_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 226 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -636,14 +577,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small_n0 - numFiles 4 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucket_small_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 226 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n0 name: default.bucket_small_n0 @@ -701,18 +636,11 @@ STAGE PLANS: bucketing_version 1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n0 - numFiles 2 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucket_big_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -728,14 +656,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big_n0 - numFiles 2 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucket_big_n0 { string 
key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n0 name: default.bucket_big_n0 diff --git a/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_7.q.out b/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_7.q.out index a20ac713bd..243435e366 100644 --- a/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_7.q.out +++ b/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_7.q.out @@ -185,20 +185,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n6 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n6 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 226 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -216,10 +209,8 @@ STAGE PLANS: name default.bucket_small_n6 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n6 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n6 name: default.bucket_small_n6 @@ -234,20 +225,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n6 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n6 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 226 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -265,10 +249,8 @@ STAGE PLANS: name default.bucket_small_n6 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n6 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n6 name: default.bucket_small_n6 @@ -327,20 +309,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n6 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n6 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -358,10 +333,8 @@ STAGE PLANS: name default.bucket_big_n6 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n6 { string key, string 
value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n6 name: default.bucket_big_n6 @@ -376,20 +349,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n6 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n6 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -407,10 +373,8 @@ STAGE PLANS: name default.bucket_big_n6 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n6 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n6 name: default.bucket_big_n6 @@ -543,20 +507,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n6 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n6 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 226 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -574,10 +531,8 @@ STAGE PLANS: name default.bucket_small_n6 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n6 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n6 name: default.bucket_small_n6 @@ -592,20 +547,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n6 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n6 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 226 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -623,10 +571,8 @@ STAGE PLANS: name default.bucket_small_n6 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n6 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n6 name: default.bucket_small_n6 @@ -685,20 +631,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments 
columns.types string:string #### A masked pattern was here #### name default.bucket_big_n6 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n6 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -716,10 +655,8 @@ STAGE PLANS: name default.bucket_big_n6 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n6 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n6 name: default.bucket_big_n6 @@ -734,20 +671,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n6 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n6 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -765,10 +695,8 @@ STAGE PLANS: name default.bucket_big_n6 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n6 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n6 name: default.bucket_big_n6 @@ -901,20 +829,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n6 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n6 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 226 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -932,10 +853,8 @@ STAGE PLANS: name default.bucket_small_n6 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n6 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n6 name: default.bucket_small_n6 @@ -950,20 +869,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n6 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n6 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 226 -#### A 
masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -981,10 +893,8 @@ STAGE PLANS: name default.bucket_small_n6 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n6 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n6 name: default.bucket_small_n6 @@ -1043,20 +953,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n6 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n6 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1074,10 +977,8 @@ STAGE PLANS: name default.bucket_big_n6 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n6 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n6 name: default.bucket_big_n6 @@ -1092,20 +993,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n6 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n6 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1123,10 +1017,8 @@ STAGE PLANS: name default.bucket_big_n6 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n6 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n6 name: default.bucket_big_n6 diff --git a/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_8.q.out b/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_8.q.out index 9b5a8ef36f..efdb245f77 100644 --- a/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_8.q.out +++ b/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_8.q.out @@ -185,20 +185,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n5 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n5 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -216,10 +209,8 @@ STAGE PLANS: name default.bucket_small_n5 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n5 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n5 name: default.bucket_small_n5 @@ -234,20 +225,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n5 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n5 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -265,10 +249,8 @@ STAGE PLANS: name default.bucket_small_n5 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n5 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n5 name: default.bucket_small_n5 @@ -327,20 +309,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n5 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n5 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -358,10 +333,8 @@ STAGE PLANS: name default.bucket_big_n5 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n5 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n5 name: default.bucket_big_n5 @@ -376,20 +349,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n5 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n5 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -407,10 +373,8 @@ STAGE PLANS: name default.bucket_big_n5 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n5 { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n5 name: default.bucket_big_n5 @@ -543,20 +507,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n5 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n5 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -574,10 +531,8 @@ STAGE PLANS: name default.bucket_small_n5 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n5 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n5 name: default.bucket_small_n5 @@ -592,20 +547,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n5 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n5 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -623,10 +571,8 @@ STAGE PLANS: name default.bucket_small_n5 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n5 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n5 name: default.bucket_small_n5 @@ -685,20 +631,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n5 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n5 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -716,10 +655,8 @@ STAGE PLANS: name default.bucket_big_n5 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n5 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n5 name: default.bucket_big_n5 @@ -734,20 +671,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked 
pattern was here #### name default.bucket_big_n5 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n5 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -765,10 +695,8 @@ STAGE PLANS: name default.bucket_big_n5 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n5 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n5 name: default.bucket_big_n5 @@ -901,20 +829,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n5 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n5 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -932,10 +853,8 @@ STAGE PLANS: name default.bucket_small_n5 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n5 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n5 name: default.bucket_small_n5 @@ -950,20 +869,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_small_n5 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_small_n5 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -981,10 +893,8 @@ STAGE PLANS: name default.bucket_small_n5 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_small_n5 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small_n5 name: default.bucket_small_n5 @@ -1043,20 +953,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n5 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n5 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1074,10 +977,8 @@ STAGE PLANS: name default.bucket_big_n5 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n5 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n5 name: default.bucket_big_n5 @@ -1092,20 +993,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.bucket_big_n5 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct bucket_big_n5 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1123,10 +1017,8 @@ STAGE PLANS: name default.bucket_big_n5 partition_columns ds partition_columns.types string - serialization.ddl struct bucket_big_n5 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big_n5 name: default.bucket_big_n5 diff --git a/ql/src/test/results/clientpositive/llap/bucket1.q.out b/ql/src/test/results/clientpositive/llap/bucket1.q.out index 3ec523f461..706d391ea9 100644 --- a/ql/src/test/results/clientpositive/llap/bucket1.q.out +++ b/ql/src/test/results/clientpositive/llap/bucket1.q.out @@ -67,30 +67,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -98,14 +88,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -131,7 +115,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 100 bucket_field_name key bucketing_version 2 @@ -141,14 +124,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.bucket1_1 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucket1_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket1_1 TotalFiles: 1 @@ -219,7 +196,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 100 bucket_field_name key bucketing_version 2 @@ -229,14 +205,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.bucket1_1 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucket1_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket1_1 diff --git a/ql/src/test/results/clientpositive/llap/bucket2.q.out b/ql/src/test/results/clientpositive/llap/bucket2.q.out index 1c1db2f114..d40bd0107b 100644 --- a/ql/src/test/results/clientpositive/llap/bucket2.q.out +++ b/ql/src/test/results/clientpositive/llap/bucket2.q.out @@ -66,30 +66,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -97,14 +87,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -130,7 +114,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 
2 bucket_field_name key bucketing_version 2 @@ -140,14 +123,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.bucket2_1 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucket2_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket2_1 TotalFiles: 2 @@ -199,7 +176,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key bucketing_version 2 @@ -209,14 +185,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.bucket2_1 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucket2_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket2_1 diff --git a/ql/src/test/results/clientpositive/llap/bucket3.q.out b/ql/src/test/results/clientpositive/llap/bucket3.q.out index 3b303bd76b..094286123e 100644 --- a/ql/src/test/results/clientpositive/llap/bucket3.q.out +++ b/ql/src/test/results/clientpositive/llap/bucket3.q.out @@ -67,30 +67,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -98,14 +88,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -143,10 +127,8 @@ STAGE PLANS: name default.bucket3_1 partition_columns ds partition_columns.types string - serialization.ddl struct bucket3_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket3_1 TotalFiles: 2 @@ -238,10 +220,8 @@ STAGE PLANS: name default.bucket3_1 
partition_columns ds partition_columns.types string - serialization.ddl struct bucket3_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket3_1 diff --git a/ql/src/test/results/clientpositive/llap/bucket4.q.out b/ql/src/test/results/clientpositive/llap/bucket4.q.out index b3831e3169..7246806692 100644 --- a/ql/src/test/results/clientpositive/llap/bucket4.q.out +++ b/ql/src/test/results/clientpositive/llap/bucket4.q.out @@ -66,30 +66,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -97,14 +87,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -130,7 +114,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -141,14 +124,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.bucket4_1 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucket4_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket4_1 TotalFiles: 2 @@ -200,7 +177,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -211,14 +187,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.bucket4_1 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucket4_1 { i32 key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket4_1 diff --git a/ql/src/test/results/clientpositive/llap/bucket_many.q.out b/ql/src/test/results/clientpositive/llap/bucket_many.q.out index b478b64c3a..75f3452e17 100644 --- a/ql/src/test/results/clientpositive/llap/bucket_many.q.out +++ b/ql/src/test/results/clientpositive/llap/bucket_many.q.out @@ -67,30 +67,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -98,14 +88,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -131,7 +115,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 256 bucket_field_name key bucketing_version 2 @@ -141,14 +124,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.bucket_many - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucket_many { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_many TotalFiles: 256 @@ -219,7 +196,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 256 bucket_field_name key bucketing_version 2 @@ -229,14 +205,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.bucket_many - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucket_many { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_many diff --git a/ql/src/test/results/clientpositive/llap/bucket_map_join_tez2.q.out b/ql/src/test/results/clientpositive/llap/bucket_map_join_tez2.q.out index 414b143ce5..54082b0ae3 100644 --- a/ql/src/test/results/clientpositive/llap/bucket_map_join_tez2.q.out +++ b/ql/src/test/results/clientpositive/llap/bucket_map_join_tez2.q.out @@ -2524,30 +2524,20 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 bucketing_version 2 column.name.delimiter , columns join_col,filter_col - columns.comments columns.types string:string #### A masked pattern was here #### name default.my_dim - numFiles 1 - numRows 4 - rawDataSize 692 - serialization.ddl struct my_dim { string join_col, string filter_col} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 340 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns join_col,filter_col @@ -2555,14 +2545,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.my_dim - numFiles 1 - numRows 4 - rawDataSize 692 - serialization.ddl struct my_dim { string join_col, string filter_col} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 340 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.my_dim name: default.my_dim diff --git a/ql/src/test/results/clientpositive/llap/bucket_num_reducers.q.out b/ql/src/test/results/clientpositive/llap/bucket_num_reducers.q.out index ccc177bdae..dc05fb232d 100644 --- a/ql/src/test/results/clientpositive/llap/bucket_num_reducers.q.out +++ b/ql/src/test/results/clientpositive/llap/bucket_num_reducers.q.out @@ -64,30 +64,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -95,14 +85,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -128,7 +112,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 50 bucket_field_name key bucketing_version 2 @@ -138,14 +121,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.bucket_nr - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucket_nr { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_nr TotalFiles: 50 @@ -164,7 +141,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 50 bucket_field_name key bucketing_version 2 @@ -174,14 +150,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.bucket_nr - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucket_nr { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_nr diff --git a/ql/src/test/results/clientpositive/llap/bucket_num_reducers2.q.out b/ql/src/test/results/clientpositive/llap/bucket_num_reducers2.q.out index 5ea3d6f14e..716d60a822 100644 --- a/ql/src/test/results/clientpositive/llap/bucket_num_reducers2.q.out +++ b/ql/src/test/results/clientpositive/llap/bucket_num_reducers2.q.out @@ -64,30 +64,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -95,14 +85,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -128,7 +112,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 3 bucket_field_name key bucketing_version 2 @@ -138,14 +121,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.test_table_n4 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct test_table_n4 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table_n4 TotalFiles: 3 @@ -197,7 +174,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 3 bucket_field_name key bucketing_version 2 @@ -207,14 +183,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.test_table_n4 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct test_table_n4 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table_n4 diff --git a/ql/src/test/results/clientpositive/llap/bucketmapjoin1.q.out b/ql/src/test/results/clientpositive/llap/bucketmapjoin1.q.out index a5b97e1faa..018cb75ef3 100644 --- a/ql/src/test/results/clientpositive/llap/bucketmapjoin1.q.out +++ b/ql/src/test/results/clientpositive/llap/bucketmapjoin1.q.out @@ -502,18 +502,11 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_n1 - numFiles 2 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -528,14 +521,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_n1 - numFiles 2 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_n1 name: default.srcbucket_mapjoin_n1 @@ -584,20 +571,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_n1 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - 
totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -614,10 +594,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_n1 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_n1 name: default.srcbucket_mapjoin_part_n1 @@ -652,8 +630,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -661,14 +637,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result_n0 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n0 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n0 TotalFiles: 1 @@ -739,8 +709,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -748,14 +716,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result_n0 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n0 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n0 @@ -954,18 +916,11 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_n1 - numFiles 2 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -980,14 +935,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_n1 - numFiles 2 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_n1 name: default.srcbucket_mapjoin_n1 @@ 
-1036,20 +985,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_n1 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1066,10 +1008,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_n1 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_n1 name: default.srcbucket_mapjoin_part_n1 @@ -1104,8 +1044,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -1113,14 +1051,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result_n0 - numFiles 1 - numRows 464 - rawDataSize 8519 - serialization.ddl struct bucketmapjoin_tmp_result_n0 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 8983 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n0 TotalFiles: 1 @@ -1191,8 +1123,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -1200,14 +1130,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result_n0 - numFiles 1 - numRows 464 - rawDataSize 8519 - serialization.ddl struct bucketmapjoin_tmp_result_n0 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 8983 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n0 diff --git a/ql/src/test/results/clientpositive/llap/bucketmapjoin2.q.out b/ql/src/test/results/clientpositive/llap/bucketmapjoin2.q.out index f5a8dc2dfd..bc9afb00eb 100644 --- a/ql/src/test/results/clientpositive/llap/bucketmapjoin2.q.out +++ b/ql/src/test/results/clientpositive/llap/bucketmapjoin2.q.out @@ -175,20 +175,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_n6 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types 
string - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_n6 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -205,10 +198,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_n6 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_n6 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_n6 name: default.srcbucket_mapjoin_part_n6 @@ -257,20 +248,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n5 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2_n5 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 3062 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -287,10 +271,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n5 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n5 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n5 name: default.srcbucket_mapjoin_part_2_n5 @@ -325,8 +307,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -334,14 +314,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result_n2 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n2 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n2 TotalFiles: 1 @@ -412,8 +386,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -421,14 +393,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result_n2 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n2 { string key, string value1, string value2} serialization.format 1 
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n2 @@ -634,20 +600,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_n6 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_n6 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -664,10 +623,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_n6 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_n6 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_n6 name: default.srcbucket_mapjoin_part_n6 @@ -716,20 +673,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n5 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2_n5 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 3062 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -746,10 +696,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n5 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n5 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n5 name: default.srcbucket_mapjoin_part_2_n5 @@ -784,8 +732,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -793,14 +739,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result_n2 - numFiles 1 - numRows 564 - rawDataSize 10503 - serialization.ddl struct bucketmapjoin_tmp_result_n2 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11067 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n2 TotalFiles: 1 @@ -871,8 +811,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat 
properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -880,14 +818,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result_n2 - numFiles 1 - numRows 564 - rawDataSize 10503 - serialization.ddl struct bucketmapjoin_tmp_result_n2 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11067 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n2 @@ -1112,20 +1044,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_n6 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_n6 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1142,10 +1067,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_n6 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_n6 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_n6 name: default.srcbucket_mapjoin_part_n6 @@ -1194,20 +1117,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n5 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2_n5 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 3062 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1224,10 +1140,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n5 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n5 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n5 name: default.srcbucket_mapjoin_part_2_n5 @@ -1243,20 +1157,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n5 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2_n5 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 3062 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1273,10 +1180,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n5 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n5 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n5 name: default.srcbucket_mapjoin_part_2_n5 @@ -1312,8 +1217,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -1321,14 +1224,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result_n2 - numFiles 1 - numRows 564 - rawDataSize 10503 - serialization.ddl struct bucketmapjoin_tmp_result_n2 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11067 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n2 TotalFiles: 1 @@ -1399,8 +1296,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -1408,14 +1303,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result_n2 - numFiles 1 - numRows 564 - rawDataSize 10503 - serialization.ddl struct bucketmapjoin_tmp_result_n2 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11067 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n2 diff --git a/ql/src/test/results/clientpositive/llap/bucketmapjoin3.q.out b/ql/src/test/results/clientpositive/llap/bucketmapjoin3.q.out index e11bb747be..c2ec2c208c 100644 --- a/ql/src/test/results/clientpositive/llap/bucketmapjoin3.q.out +++ b/ql/src/test/results/clientpositive/llap/bucketmapjoin3.q.out @@ -199,20 +199,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n11 - numFiles 2 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2_n11 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 3062 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -229,10 +222,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n11 partition_columns ds partition_columns.types string - serialization.ddl struct 
srcbucket_mapjoin_part_2_n11 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n11 name: default.srcbucket_mapjoin_part_2_n11 @@ -281,20 +272,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_n13 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_n13 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -311,10 +295,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_n13 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_n13 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_n13 name: default.srcbucket_mapjoin_part_n13 @@ -349,8 +331,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -358,14 +338,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result_n6 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n6 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n6 TotalFiles: 1 @@ -436,8 +410,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -445,14 +417,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result_n6 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n6 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n6 @@ -658,20 +624,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n11 - numFiles 2 - numRows 0 partition_columns ds 
partition_columns.types string - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2_n11 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 3062 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -688,10 +647,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n11 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n11 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n11 name: default.srcbucket_mapjoin_part_2_n11 @@ -740,20 +697,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_n13 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_n13 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -770,10 +720,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_n13 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_n13 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_n13 name: default.srcbucket_mapjoin_part_n13 @@ -808,8 +756,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -817,14 +763,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result_n6 - numFiles 1 - numRows 564 - rawDataSize 10503 - serialization.ddl struct bucketmapjoin_tmp_result_n6 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11067 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n6 TotalFiles: 1 @@ -895,8 +835,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -904,14 +842,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result_n6 - numFiles 1 - numRows 564 - rawDataSize 10503 - serialization.ddl struct bucketmapjoin_tmp_result_n6 { string key, 
string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11067 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n6 diff --git a/ql/src/test/results/clientpositive/llap/bucketmapjoin4.q.out b/ql/src/test/results/clientpositive/llap/bucketmapjoin4.q.out index b4d9d3d909..b1976d2d4e 100644 --- a/ql/src/test/results/clientpositive/llap/bucketmapjoin4.q.out +++ b/ql/src/test/results/clientpositive/llap/bucketmapjoin4.q.out @@ -192,18 +192,11 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_n17 - numFiles 2 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_n17 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -218,14 +211,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_n17 - numFiles 2 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_n17 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_n17 name: default.srcbucket_mapjoin_n17 @@ -273,18 +260,11 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_n17 - numFiles 2 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_n17 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -299,14 +279,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_n17 - numFiles 2 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_n17 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_n17 name: default.srcbucket_mapjoin_n17 @@ -341,8 +315,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -350,14 +322,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result_n8 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n8 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - 
totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n8 TotalFiles: 1 @@ -428,8 +394,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -437,14 +401,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result_n8 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n8 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n8 @@ -631,18 +589,11 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_n17 - numFiles 2 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_n17 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -657,14 +608,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_n17 - numFiles 2 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_n17 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_n17 name: default.srcbucket_mapjoin_n17 @@ -712,18 +657,11 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_n17 - numFiles 2 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_n17 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -738,14 +676,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_n17 - numFiles 2 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_n17 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_n17 name: default.srcbucket_mapjoin_n17 @@ -780,8 +712,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -789,14 +719,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result_n8 - numFiles 1 - numRows 464 - rawDataSize 8519 - serialization.ddl struct bucketmapjoin_tmp_result_n8 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 8983 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n8 TotalFiles: 1 @@ -867,8 +791,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -876,14 +798,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result_n8 - numFiles 1 - numRows 464 - rawDataSize 8519 - serialization.ddl struct bucketmapjoin_tmp_result_n8 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 8983 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n8 diff --git a/ql/src/test/results/clientpositive/llap/bucketmapjoin7.q.out b/ql/src/test/results/clientpositive/llap/bucketmapjoin7.q.out index 5b476edf30..11c263244e 100644 --- a/ql/src/test/results/clientpositive/llap/bucketmapjoin7.q.out +++ b/ql/src/test/results/clientpositive/llap/bucketmapjoin7.q.out @@ -126,20 +126,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1_n4 - numFiles 2 - numRows 0 partition_columns ds/hr partition_columns.types string:string - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_1_n4 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -156,10 +149,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_1_n4 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcbucket_mapjoin_part_1_n4 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1_n4 name: default.srcbucket_mapjoin_part_1_n4 @@ -205,20 +196,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n9 - numFiles 2 - numRows 0 partition_columns ds/hr partition_columns.types string:string - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_2_n9 { i32 key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -235,10 +219,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n9 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcbucket_mapjoin_part_2_n9 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n9 name: default.srcbucket_mapjoin_part_2_n9 diff --git a/ql/src/test/results/clientpositive/llap/cbo_rp_outer_join_ppr.q.out b/ql/src/test/results/clientpositive/llap/cbo_rp_outer_join_ppr.q.out index c5e10689ea..3d0f5ad2ec 100644 --- a/ql/src/test/results/clientpositive/llap/cbo_rp_outer_join_ppr.q.out +++ b/ql/src/test/results/clientpositive/llap/cbo_rp_outer_join_ppr.q.out @@ -75,30 +75,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -106,14 +96,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -159,30 +143,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -192,10 +166,8 @@ STAGE 
PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -208,30 +180,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -241,10 +203,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -414,30 +374,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -445,14 +395,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -498,30 +442,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types 
string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -531,10 +465,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -547,30 +479,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -580,10 +502,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart diff --git a/ql/src/test/results/clientpositive/llap/cbo_stats_estimation.q.out b/ql/src/test/results/clientpositive/llap/cbo_stats_estimation.q.out index 03826ba703..9f2731d61a 100644 --- a/ql/src/test/results/clientpositive/llap/cbo_stats_estimation.q.out +++ b/ql/src/test/results/clientpositive/llap/cbo_stats_estimation.q.out @@ -80,24 +80,16 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns claim_rec_id,claim_invoice_num,typ_c - columns.comments columns.types bigint:string:int #### A masked pattern was here #### name default.claims - numFiles 0 - numRows 1154941534 - rawDataSize 1135307527922 - serialization.ddl struct claims { i64 claim_rec_id, string claim_invoice_num, i32 typ_c} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns 
claim_rec_id,claim_invoice_num,typ_c @@ -105,14 +97,8 @@ STAGE PLANS: columns.types bigint:string:int #### A masked pattern was here #### name default.claims - numFiles 0 - numRows 1154941534 - rawDataSize 1135307527922 - serialization.ddl struct claims { i64 claim_rec_id, string claim_invoice_num, i32 typ_c} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.claims name: default.claims @@ -224,24 +210,16 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns claim_rec_id,claim_invoice_num,typ_c - columns.comments columns.types bigint:string:int #### A masked pattern was here #### name default.claims - numFiles 0 - numRows 1154941534 - rawDataSize 1135307527922 - serialization.ddl struct claims { i64 claim_rec_id, string claim_invoice_num, i32 typ_c} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns claim_rec_id,claim_invoice_num,typ_c @@ -249,14 +227,8 @@ STAGE PLANS: columns.types bigint:string:int #### A masked pattern was here #### name default.claims - numFiles 0 - numRows 1154941534 - rawDataSize 1135307527922 - serialization.ddl struct claims { i64 claim_rec_id, string claim_invoice_num, i32 typ_c} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.claims name: default.claims diff --git a/ql/src/test/results/clientpositive/llap/column_table_stats.q.out b/ql/src/test/results/clientpositive/llap/column_table_stats.q.out index b6d3bb446f..fb4480863a 100644 --- a/ql/src/test/results/clientpositive/llap/column_table_stats.q.out +++ b/ql/src/test/results/clientpositive/llap/column_table_stats.q.out @@ -115,24 +115,16 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.s - numFiles 1 - numRows 0 - rawDataSize 0 - serialization.ddl struct s { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -140,14 +132,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.s - numFiles 1 - numRows 0 - rawDataSize 0 - serialization.ddl struct s { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.s name: default.s @@ -393,29 +379,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - bucket_count -1 column.name.delimiter , columns key,value - 
columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.spart - numFiles 1 - numRows 0 partition_columns ds/hr partition_columns.types string:string - rawDataSize 0 - serialization.ddl struct spart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -425,10 +402,8 @@ STAGE PLANS: name default.spart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct spart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.spart name: default.spart @@ -441,29 +416,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.spart - numFiles 1 - numRows 0 partition_columns ds/hr partition_columns.types string:string - rawDataSize 0 - serialization.ddl struct spart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -473,10 +439,8 @@ STAGE PLANS: name default.spart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct spart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.spart name: default.spart @@ -822,29 +786,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.spart - numFiles 1 - numRows 0 partition_columns ds/hr partition_columns.types string:string - rawDataSize 0 - serialization.ddl struct spart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -854,10 +809,8 @@ STAGE PLANS: name default.spart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct spart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe 
name: default.spart name: default.spart @@ -870,29 +823,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.spart - numFiles 1 - numRows 0 partition_columns ds/hr partition_columns.types string:string - rawDataSize 0 - serialization.ddl struct spart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -902,10 +846,8 @@ STAGE PLANS: name default.spart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct spart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.spart name: default.spart @@ -1248,29 +1190,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.spart - numFiles 1 - numRows 0 partition_columns ds/hr partition_columns.types string:string - rawDataSize 0 - serialization.ddl struct spart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1280,10 +1213,8 @@ STAGE PLANS: name default.spart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct spart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.spart name: default.spart diff --git a/ql/src/test/results/clientpositive/llap/column_table_stats_orc.q.out b/ql/src/test/results/clientpositive/llap/column_table_stats_orc.q.out index 7ca66229fb..dc80b7ccad 100644 --- a/ql/src/test/results/clientpositive/llap/column_table_stats_orc.q.out +++ b/ql/src/test/results/clientpositive/llap/column_table_stats_orc.q.out @@ -114,30 +114,20 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.s_n0 - numFiles 1 - numRows 1 - rawDataSize 170 - serialization.ddl struct s_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 285 -#### A masked pattern 
was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -145,14 +135,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.s_n0 - numFiles 1 - numRows 1 - rawDataSize 170 - serialization.ddl struct s_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 285 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.s_n0 name: default.s_n0 @@ -396,26 +380,17 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 #### A masked pattern was here #### name default.spart_n0 - numFiles 1 - numRows 1 partition_columns ds/hr partition_columns.types string:string - rawDataSize 170 - serialization.ddl struct spart_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 285 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -425,10 +400,8 @@ STAGE PLANS: name default.spart_n0 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct spart_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.spart_n0 name: default.spart_n0 @@ -441,26 +414,17 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 #### A masked pattern was here #### name default.spart_n0 - numFiles 1 - numRows 1 partition_columns ds/hr partition_columns.types string:string - rawDataSize 170 - serialization.ddl struct spart_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 285 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -470,10 +434,8 @@ STAGE PLANS: name default.spart_n0 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct spart_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.spart_n0 name: default.spart_n0 @@ -814,26 +776,17 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 #### A masked pattern was here #### name default.spart_n0 - numFiles 1 - numRows 1 partition_columns ds/hr partition_columns.types string:string - 
rawDataSize 170 - serialization.ddl struct spart_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 285 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -843,10 +796,8 @@ STAGE PLANS: name default.spart_n0 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct spart_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.spart_n0 name: default.spart_n0 diff --git a/ql/src/test/results/clientpositive/llap/columnstats_partlvl.q.out b/ql/src/test/results/clientpositive/llap/columnstats_partlvl.q.out index 4048ee9410..8152a824f1 100644 --- a/ql/src/test/results/clientpositive/llap/columnstats_partlvl.q.out +++ b/ql/src/test/results/clientpositive/llap/columnstats_partlvl.q.out @@ -184,30 +184,21 @@ STAGE PLANS: partition values: employeesalary 2000.0 properties: - bucket_count -1 column.name.delimiter , columns employeeid,employeename - columns.comments columns.types int:string field.delim | #### A masked pattern was here #### name default.employee_part - numFiles 1 - numRows 0 partition_columns employeesalary partition_columns.types double - rawDataSize 0 - serialization.ddl struct employee_part { i32 employeeid, string employeename} serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 105 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns employeeid,employeename @@ -218,10 +209,8 @@ STAGE PLANS: name default.employee_part partition_columns employeesalary partition_columns.types double - serialization.ddl struct employee_part { i32 employeeid, string employeename} serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.employee_part name: default.employee_part @@ -482,30 +471,21 @@ STAGE PLANS: partition values: employeesalary 4000.0 properties: - bucket_count -1 column.name.delimiter , columns employeeid,employeename - columns.comments columns.types int:string field.delim | #### A masked pattern was here #### name default.employee_part - numFiles 1 - numRows 0 partition_columns employeesalary partition_columns.types double - rawDataSize 0 - serialization.ddl struct employee_part { i32 employeeid, string employeename} serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 105 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns employeeid,employeename @@ -516,10 +496,8 @@ STAGE PLANS: name default.employee_part partition_columns 
employeesalary partition_columns.types double - serialization.ddl struct employee_part { i32 employeeid, string employeename} serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.employee_part name: default.employee_part diff --git a/ql/src/test/results/clientpositive/llap/columnstats_tbllvl.q.out b/ql/src/test/results/clientpositive/llap/columnstats_tbllvl.q.out index 83ae4146f0..f361fda252 100644 --- a/ql/src/test/results/clientpositive/llap/columnstats_tbllvl.q.out +++ b/ql/src/test/results/clientpositive/llap/columnstats_tbllvl.q.out @@ -173,25 +173,17 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns sourceip,desturl,visitdate,adrevenue,useragent,ccode,lcode,skeyword,avgtimeonsite - columns.comments columns.types string:string:string:float:string:string:string:string:int field.delim | #### A masked pattern was here #### name default.uservisits_web_text_none - numFiles 1 - numRows 0 - rawDataSize 0 - serialization.ddl struct uservisits_web_text_none { string sourceip, string desturl, string visitdate, float adrevenue, string useragent, string ccode, string lcode, string skeyword, i32 avgtimeonsite} serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 7060 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns sourceip,desturl,visitdate,adrevenue,useragent,ccode,lcode,skeyword,avgtimeonsite @@ -200,14 +192,8 @@ STAGE PLANS: field.delim | #### A masked pattern was here #### name default.uservisits_web_text_none - numFiles 1 - numRows 0 - rawDataSize 0 - serialization.ddl struct uservisits_web_text_none { string sourceip, string desturl, string visitdate, float adrevenue, string useragent, string ccode, string lcode, string skeyword, i32 avgtimeonsite} serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 7060 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.uservisits_web_text_none name: default.uservisits_web_text_none @@ -722,25 +708,17 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns sourceip,desturl,visitdate,adrevenue,useragent,ccode,lcode,skeyword,avgtimeonsite - columns.comments columns.types string:string:string:float:string:string:string:string:int field.delim | #### A masked pattern was here #### name dummydb.uservisits_in_dummy_db - numFiles 1 - numRows 0 - rawDataSize 0 - serialization.ddl struct uservisits_in_dummy_db { string sourceip, string desturl, string visitdate, float adrevenue, string useragent, string ccode, string lcode, string skeyword, i32 avgtimeonsite} serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 7060 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns sourceip,desturl,visitdate,adrevenue,useragent,ccode,lcode,skeyword,avgtimeonsite @@ -749,14 +727,8 @@ STAGE PLANS: 
field.delim | #### A masked pattern was here #### name dummydb.uservisits_in_dummy_db - numFiles 1 - numRows 0 - rawDataSize 0 - serialization.ddl struct uservisits_in_dummy_db { string sourceip, string desturl, string visitdate, float adrevenue, string useragent, string ccode, string lcode, string skeyword, i32 avgtimeonsite} serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 7060 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dummydb.uservisits_in_dummy_db name: dummydb.uservisits_in_dummy_db diff --git a/ql/src/test/results/clientpositive/llap/comments.q.out b/ql/src/test/results/clientpositive/llap/comments.q.out index f04bec9ef8..d0362be681 100644 --- a/ql/src/test/results/clientpositive/llap/comments.q.out +++ b/ql/src/test/results/clientpositive/llap/comments.q.out @@ -151,30 +151,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -182,14 +172,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -231,30 +215,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -262,14 +236,8 @@ STAGE 
PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src diff --git a/ql/src/test/results/clientpositive/llap/constantPropagateForSubQuery.q.out b/ql/src/test/results/clientpositive/llap/constantPropagateForSubQuery.q.out index e20f7beec6..6baff579ee 100644 --- a/ql/src/test/results/clientpositive/llap/constantPropagateForSubQuery.q.out +++ b/ql/src/test/results/clientpositive/llap/constantPropagateForSubQuery.q.out @@ -64,30 +64,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -95,14 +85,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -138,30 +122,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src1 - numFiles 1 - numRows 25 - rawDataSize 191 - serialization.ddl struct src1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -169,14 +143,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src1 - numFiles 
1 - numRows 25 - rawDataSize 191 - serialization.ddl struct src1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src1 name: default.src1 diff --git a/ql/src/test/results/clientpositive/llap/disable_merge_for_bucketing.q.out b/ql/src/test/results/clientpositive/llap/disable_merge_for_bucketing.q.out index a597f2c075..946f0c4128 100644 --- a/ql/src/test/results/clientpositive/llap/disable_merge_for_bucketing.q.out +++ b/ql/src/test/results/clientpositive/llap/disable_merge_for_bucketing.q.out @@ -66,30 +66,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -97,14 +87,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -130,7 +114,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key bucketing_version 2 @@ -140,14 +123,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.bucket2_1_n0 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucket2_1_n0 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket2_1_n0 TotalFiles: 2 @@ -199,7 +176,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key bucketing_version 2 @@ -209,14 +185,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.bucket2_1_n0 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucket2_1_n0 
{ i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket2_1_n0 diff --git a/ql/src/test/results/clientpositive/llap/display_colstats_tbllvl.q.out b/ql/src/test/results/clientpositive/llap/display_colstats_tbllvl.q.out index 92c58fb720..b5fbb21d6e 100644 --- a/ql/src/test/results/clientpositive/llap/display_colstats_tbllvl.q.out +++ b/ql/src/test/results/clientpositive/llap/display_colstats_tbllvl.q.out @@ -198,25 +198,17 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns sourceip,desturl,visitdate,adrevenue,useragent,ccode,lcode,skeyword,avgtimeonsite - columns.comments columns.types string:string:string:float:string:string:string:string:int field.delim | #### A masked pattern was here #### name default.uservisits_web_text_none_n0 - numFiles 1 - numRows 0 - rawDataSize 0 - serialization.ddl struct uservisits_web_text_none_n0 { string sourceip, string desturl, string visitdate, float adrevenue, string useragent, string ccode, string lcode, string skeyword, i32 avgtimeonsite} serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 7060 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns sourceip,desturl,visitdate,adrevenue,useragent,ccode,lcode,skeyword,avgtimeonsite @@ -225,14 +217,8 @@ STAGE PLANS: field.delim | #### A masked pattern was here #### name default.uservisits_web_text_none_n0 - numFiles 1 - numRows 0 - rawDataSize 0 - serialization.ddl struct uservisits_web_text_none_n0 { string sourceip, string desturl, string visitdate, float adrevenue, string useragent, string ccode, string lcode, string skeyword, i32 avgtimeonsite} serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 7060 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.uservisits_web_text_none_n0 name: default.uservisits_web_text_none_n0 diff --git a/ql/src/test/results/clientpositive/llap/dynamic_semijoin_reduction.q.out b/ql/src/test/results/clientpositive/llap/dynamic_semijoin_reduction.q.out index e379b14e27..3efb830c4f 100644 --- a/ql/src/test/results/clientpositive/llap/dynamic_semijoin_reduction.q.out +++ b/ql/src/test/results/clientpositive/llap/dynamic_semijoin_reduction.q.out @@ -1784,26 +1784,17 @@ STAGE PLANS: partition values: ds 2008-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 #### A masked pattern was here #### name default.srcpart_date_n7 - numFiles 1 - numRows 1000 partition_columns ds partition_columns.types string - rawDataSize 176000 - serialization.ddl struct srcpart_date_n7 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 3054 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1813,10 
+1804,8 @@ STAGE PLANS: name default.srcpart_date_n7 partition_columns ds partition_columns.types string - serialization.ddl struct srcpart_date_n7 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.srcpart_date_n7 name: default.srcpart_date_n7 @@ -1828,26 +1817,17 @@ STAGE PLANS: partition values: ds 2008-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 #### A masked pattern was here #### name default.srcpart_date_n7 - numFiles 1 - numRows 1000 partition_columns ds partition_columns.types string - rawDataSize 176000 - serialization.ddl struct srcpart_date_n7 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 3054 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1857,10 +1837,8 @@ STAGE PLANS: name default.srcpart_date_n7 partition_columns ds partition_columns.types string - serialization.ddl struct srcpart_date_n7 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.srcpart_date_n7 name: default.srcpart_date_n7 @@ -1925,26 +1903,17 @@ STAGE PLANS: partition values: ds 2008-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key1":"true","value1":"true"}} - bucket_count -1 #### A masked pattern was here #### name default.srcpart_small_n3 - numFiles 0 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct srcpart_small_n3 { string key1, string value1} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,value1 @@ -1954,10 +1923,8 @@ STAGE PLANS: name default.srcpart_small_n3 partition_columns ds partition_columns.types string - serialization.ddl struct srcpart_small_n3 { string key1, string value1} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.srcpart_small_n3 name: default.srcpart_small_n3 @@ -1969,26 +1936,17 @@ STAGE PLANS: partition values: ds 2008-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key1":"true","value1":"true"}} - bucket_count -1 #### A masked pattern was here #### name default.srcpart_small_n3 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 3520 - serialization.ddl struct srcpart_small_n3 { string key1, string value1} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 471 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde input format: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,value1 @@ -1998,10 +1956,8 @@ STAGE PLANS: name default.srcpart_small_n3 partition_columns ds partition_columns.types string - serialization.ddl struct srcpart_small_n3 { string key1, string value1} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.srcpart_small_n3 name: default.srcpart_small_n3 diff --git a/ql/src/test/results/clientpositive/llap/filter_aggr.q.out b/ql/src/test/results/clientpositive/llap/filter_aggr.q.out index 49472ccd6e..11c946bb7f 100644 --- a/ql/src/test/results/clientpositive/llap/filter_aggr.q.out +++ b/ql/src/test/results/clientpositive/llap/filter_aggr.q.out @@ -74,30 +74,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -105,14 +95,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src diff --git a/ql/src/test/results/clientpositive/llap/filter_join_breaktask.q.out b/ql/src/test/results/clientpositive/llap/filter_join_breaktask.q.out index a143c0b6f7..4956d54638 100644 --- a/ql/src/test/results/clientpositive/llap/filter_join_breaktask.q.out +++ b/ql/src/test/results/clientpositive/llap/filter_join_breaktask.q.out @@ -95,30 +95,20 @@ STAGE PLANS: partition values: ds 2008-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.filter_join_breaktask - numFiles 1 - numRows 25 partition_columns ds partition_columns.types string - rawDataSize 211 - serialization.ddl struct filter_join_breaktask { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 236 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: 
org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -128,10 +118,8 @@ STAGE PLANS: name default.filter_join_breaktask partition_columns ds partition_columns.types string - serialization.ddl struct filter_join_breaktask { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.filter_join_breaktask name: default.filter_join_breaktask @@ -176,30 +164,20 @@ STAGE PLANS: partition values: ds 2008-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.filter_join_breaktask - numFiles 1 - numRows 25 partition_columns ds partition_columns.types string - rawDataSize 211 - serialization.ddl struct filter_join_breaktask { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 236 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -209,10 +187,8 @@ STAGE PLANS: name default.filter_join_breaktask partition_columns ds partition_columns.types string - serialization.ddl struct filter_join_breaktask { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.filter_join_breaktask name: default.filter_join_breaktask @@ -256,30 +232,20 @@ STAGE PLANS: partition values: ds 2008-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.filter_join_breaktask - numFiles 1 - numRows 25 partition_columns ds partition_columns.types string - rawDataSize 211 - serialization.ddl struct filter_join_breaktask { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 236 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -289,10 +255,8 @@ STAGE PLANS: name default.filter_join_breaktask partition_columns ds partition_columns.types string - serialization.ddl struct filter_join_breaktask { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.filter_join_breaktask name: default.filter_join_breaktask diff --git a/ql/src/test/results/clientpositive/llap/filter_union.q.out 
b/ql/src/test/results/clientpositive/llap/filter_union.q.out index 50761ebcf6..2e472ef840 100644 --- a/ql/src/test/results/clientpositive/llap/filter_union.q.out +++ b/ql/src/test/results/clientpositive/llap/filter_union.q.out @@ -87,30 +87,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -118,14 +108,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -170,30 +154,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -201,14 +175,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src diff --git a/ql/src/test/results/clientpositive/llap/groupby_map_ppr.q.out b/ql/src/test/results/clientpositive/llap/groupby_map_ppr.q.out index 09b9e2f37b..1f8d703728 100644 --- 
a/ql/src/test/results/clientpositive/llap/groupby_map_ppr.q.out +++ b/ql/src/test/results/clientpositive/llap/groupby_map_ppr.q.out @@ -90,30 +90,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -123,10 +113,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -139,30 +127,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -172,10 +150,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -208,8 +184,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"c1":"true","c2":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,c1,c2 @@ -217,14 +191,8 @@ STAGE PLANS: columns.types string:int:string #### A masked pattern was here #### name default.dest1_n144 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest1_n144 { string key, i32 c1, string c2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern 
was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1_n144 TotalFiles: 1 @@ -295,8 +263,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"c1":"true","c2":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,c1,c2 @@ -304,14 +270,8 @@ STAGE PLANS: columns.types string:int:string #### A masked pattern was here #### name default.dest1_n144 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest1_n144 { string key, i32 c1, string c2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1_n144 diff --git a/ql/src/test/results/clientpositive/llap/groupby_map_ppr_multi_distinct.q.out b/ql/src/test/results/clientpositive/llap/groupby_map_ppr_multi_distinct.q.out index c5830c769d..8f93eb0dcf 100644 --- a/ql/src/test/results/clientpositive/llap/groupby_map_ppr_multi_distinct.q.out +++ b/ql/src/test/results/clientpositive/llap/groupby_map_ppr_multi_distinct.q.out @@ -90,30 +90,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -123,10 +113,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -139,30 +127,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 
column.name.delimiter , columns key,value @@ -172,10 +150,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -208,8 +184,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"c1":"true","c2":"true","c3":"true","c4":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,c1,c2,c3,c4 @@ -217,14 +191,8 @@ STAGE PLANS: columns.types string:int:string:int:int #### A masked pattern was here #### name default.dest1_n174 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest1_n174 { string key, i32 c1, string c2, i32 c3, i32 c4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1_n174 TotalFiles: 1 @@ -295,8 +263,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"c1":"true","c2":"true","c3":"true","c4":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,c1,c2,c3,c4 @@ -304,14 +270,8 @@ STAGE PLANS: columns.types string:int:string:int:int #### A masked pattern was here #### name default.dest1_n174 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest1_n174 { string key, i32 c1, string c2, i32 c3, i32 c4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1_n174 diff --git a/ql/src/test/results/clientpositive/llap/groupby_ppr.q.out b/ql/src/test/results/clientpositive/llap/groupby_ppr.q.out index b54f27c8bc..9b857193a2 100644 --- a/ql/src/test/results/clientpositive/llap/groupby_ppr.q.out +++ b/ql/src/test/results/clientpositive/llap/groupby_ppr.q.out @@ -82,30 +82,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -115,10 +105,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types 
string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -131,30 +119,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -164,10 +142,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -200,8 +176,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"c1":"true","c2":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,c1,c2 @@ -209,14 +183,8 @@ STAGE PLANS: columns.types string:int:string #### A masked pattern was here #### name default.dest1_n79 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest1_n79 { string key, i32 c1, string c2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1_n79 TotalFiles: 1 @@ -281,8 +249,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"c1":"true","c2":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,c1,c2 @@ -290,14 +256,8 @@ STAGE PLANS: columns.types string:int:string #### A masked pattern was here #### name default.dest1_n79 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest1_n79 { string key, i32 c1, string c2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1_n79 diff --git a/ql/src/test/results/clientpositive/llap/groupby_ppr_multi_distinct.q.out b/ql/src/test/results/clientpositive/llap/groupby_ppr_multi_distinct.q.out index c307d9c25a..0f12ff8348 100644 --- 
a/ql/src/test/results/clientpositive/llap/groupby_ppr_multi_distinct.q.out +++ b/ql/src/test/results/clientpositive/llap/groupby_ppr_multi_distinct.q.out @@ -82,30 +82,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -115,10 +105,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -131,30 +119,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -164,10 +142,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -200,8 +176,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"c1":"true","c2":"true","c3":"true","c4":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,c1,c2,c3,c4 @@ -209,14 +183,8 @@ STAGE PLANS: columns.types string:int:string:int:int #### A masked pattern was here #### name default.dest1 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest1 { string key, i32 c1, string c2, i32 c3, i32 c4} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 TotalFiles: 1 @@ -281,8 +249,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"c1":"true","c2":"true","c3":"true","c4":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,c1,c2,c3,c4 @@ -290,14 +256,8 @@ STAGE PLANS: columns.types string:int:string:int:int #### A masked pattern was here #### name default.dest1 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest1 { string key, i32 c1, string c2, i32 c3, i32 c4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -426,30 +386,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -459,10 +409,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -475,30 +423,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -508,10 +446,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -544,8 +480,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"c1":"true","c2":"true","c3":"true","c4":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,c1,c2,c3,c4 @@ -553,14 +487,8 @@ STAGE PLANS: columns.types string:int:string:int:int #### A masked pattern was here #### name default.dest1 - numFiles 1 - numRows 10 - rawDataSize 184 - serialization.ddl struct dest1 { string key, i32 c1, string c2, i32 c3, i32 c4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 194 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 TotalFiles: 1 @@ -629,8 +557,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"c1":"true","c2":"true","c3":"true","c4":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,c1,c2,c3,c4 @@ -638,14 +564,8 @@ STAGE PLANS: columns.types string:int:string:int:int #### A masked pattern was here #### name default.dest1 - numFiles 1 - numRows 10 - rawDataSize 184 - serialization.ddl struct dest1 { string key, i32 c1, string c2, i32 c3, i32 c4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 194 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 diff --git a/ql/src/test/results/clientpositive/llap/groupby_sort_1_23.q.out b/ql/src/test/results/clientpositive/llap/groupby_sort_1_23.q.out index 023929da92..4228454958 100644 --- a/ql/src/test/results/clientpositive/llap/groupby_sort_1_23.q.out +++ b/ql/src/test/results/clientpositive/llap/groupby_sort_1_23.q.out @@ -95,8 +95,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -104,14 +102,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n18 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct outputtbl1_n18 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n18 TotalFiles: 1 @@ -147,31 +139,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - 
rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -182,14 +165,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n80 name: default.t1_n80 @@ -241,8 +218,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -250,14 +225,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n18 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct outputtbl1_n18 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n18 @@ -373,31 +342,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -408,14 +368,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n80 name: default.t1_n80 @@ -447,8 +401,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true"}} - bucket_count -1 
bucketing_version 2 column.name.delimiter , columns key1,key2,cnt @@ -456,14 +408,8 @@ STAGE PLANS: columns.types int:string:int #### A masked pattern was here #### name default.outputtbl2_n5 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct outputtbl2_n5 { i32 key1, string key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl2_n5 TotalFiles: 1 @@ -534,8 +480,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,cnt @@ -543,14 +487,8 @@ STAGE PLANS: columns.types int:string:int #### A masked pattern was here #### name default.outputtbl2_n5 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct outputtbl2_n5 { i32 key1, string key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl2_n5 @@ -652,8 +590,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -661,14 +597,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n18 - numFiles 1 - numRows 5 - rawDataSize 15 - serialization.ddl struct outputtbl1_n18 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 20 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n18 TotalFiles: 1 @@ -704,31 +634,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -739,14 +660,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was 
here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n80 name: default.t1_n80 @@ -798,8 +713,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -807,14 +720,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n18 - numFiles 1 - numRows 5 - rawDataSize 15 - serialization.ddl struct outputtbl1_n18 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 20 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n18 @@ -914,8 +821,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -923,14 +828,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n18 - numFiles 1 - numRows 5 - rawDataSize 15 - serialization.ddl struct outputtbl1_n18 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 20 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n18 TotalFiles: 1 @@ -966,31 +865,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -1001,14 +891,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n80 name: default.t1_n80 @@ -1060,8 +944,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -1069,14 +951,8 @@ 
STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n18 - numFiles 1 - numRows 5 - rawDataSize 15 - serialization.ddl struct outputtbl1_n18 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 20 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n18 @@ -1184,8 +1060,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,cnt @@ -1193,14 +1067,8 @@ STAGE PLANS: columns.types int:int:int #### A masked pattern was here #### name default.outputtbl3_n2 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct outputtbl3_n2 { i32 key1, i32 key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl3_n2 TotalFiles: 1 @@ -1236,31 +1104,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -1271,14 +1130,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n80 name: default.t1_n80 @@ -1330,8 +1183,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,cnt @@ -1339,14 +1190,8 @@ STAGE PLANS: columns.types int:int:int #### A masked pattern was here #### name default.outputtbl3_n2 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct outputtbl3_n2 { i32 key1, i32 key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl3_n2 @@ -1463,31 +1308,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -1498,14 +1334,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n80 name: default.t1_n80 @@ -1537,8 +1367,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true","key3":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,key3,cnt @@ -1546,14 +1374,8 @@ STAGE PLANS: columns.types int:int:string:int #### A masked pattern was here #### name default.outputtbl4_n2 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct outputtbl4_n2 { i32 key1, i32 key2, string key3, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl4_n2 TotalFiles: 1 @@ -1624,8 +1446,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true","key3":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,key3,cnt @@ -1633,14 +1453,8 @@ STAGE PLANS: columns.types int:int:string:int #### A masked pattern was here #### name default.outputtbl4_n2 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct outputtbl4_n2 { i32 key1, i32 key2, string key3, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl4_n2 @@ -1751,31 +1565,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX 
TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -1786,14 +1591,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n80 name: default.t1_n80 @@ -1825,8 +1624,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,cnt @@ -1834,14 +1631,8 @@ STAGE PLANS: columns.types int:int:int #### A masked pattern was here #### name default.outputtbl3_n2 - numFiles 1 - numRows 5 - rawDataSize 25 - serialization.ddl struct outputtbl3_n2 { i32 key1, i32 key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl3_n2 TotalFiles: 1 @@ -1912,8 +1703,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,cnt @@ -1921,14 +1710,8 @@ STAGE PLANS: columns.types int:int:int #### A masked pattern was here #### name default.outputtbl3_n2 - numFiles 1 - numRows 5 - rawDataSize 25 - serialization.ddl struct outputtbl3_n2 { i32 key1, i32 key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl3_n2 @@ -2051,31 +1834,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - 
totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -2086,14 +1860,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n80 name: default.t1_n80 @@ -2125,8 +1893,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -2134,14 +1900,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n18 - numFiles 1 - numRows 5 - rawDataSize 15 - serialization.ddl struct outputtbl1_n18 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 20 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n18 TotalFiles: 1 @@ -2212,8 +1972,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -2221,14 +1979,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n18 - numFiles 1 - numRows 5 - rawDataSize 15 - serialization.ddl struct outputtbl1_n18 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 20 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n18 @@ -2346,8 +2098,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -2355,14 +2105,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n18 - numFiles 2 - numRows 5 - rawDataSize 17 - serialization.ddl struct outputtbl1_n18 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 22 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n18 TotalFiles: 1 @@ -2398,31 +2142,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key 
bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -2433,14 +2168,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n80 name: default.t1_n80 @@ -2478,8 +2207,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -2487,14 +2214,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n18 - numFiles 2 - numRows 5 - rawDataSize 17 - serialization.ddl struct outputtbl1_n18 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 22 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n18 TotalFiles: 1 @@ -2530,31 +2251,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -2565,14 +2277,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n80 name: default.t1_n80 @@ -2626,8 +2332,6 @@ STAGE PLANS: input format: 
org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -2635,14 +2339,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n18 - numFiles 2 - numRows 5 - rawDataSize 17 - serialization.ddl struct outputtbl1_n18 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 22 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n18 @@ -2769,8 +2467,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -2778,14 +2474,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n18 - numFiles 2 - numRows 10 - rawDataSize 30 - serialization.ddl struct outputtbl1_n18 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 40 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n18 TotalFiles: 1 @@ -2821,31 +2511,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -2856,14 +2537,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n80 name: default.t1_n80 @@ -2908,31 +2583,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl 
struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -2943,14 +2609,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n80 name: default.t1_n80 @@ -3019,8 +2679,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -3028,14 +2686,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n18 - numFiles 2 - numRows 10 - rawDataSize 30 - serialization.ddl struct outputtbl1_n18 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 40 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n18 TotalFiles: 1 @@ -3075,8 +2727,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -3084,14 +2734,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n18 - numFiles 2 - numRows 10 - rawDataSize 30 - serialization.ddl struct outputtbl1_n18 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 40 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n18 @@ -3212,31 +2856,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE 
bucket_count 2 bucket_field_name key @@ -3247,14 +2882,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n80 name: default.t1_n80 @@ -3301,8 +2930,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -3310,14 +2937,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n18 - numFiles 3 - numRows 10 - rawDataSize 32 - serialization.ddl struct outputtbl1_n18 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 42 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n18 TotalFiles: 1 @@ -3351,31 +2972,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -3386,14 +2998,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n80 name: default.t1_n80 @@ -3445,8 +3051,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -3454,14 +3058,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n18 - numFiles 3 - numRows 10 - rawDataSize 32 - serialization.ddl struct outputtbl1_n18 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 42 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n18 @@ -3586,31 +3184,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -3621,14 +3210,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n80 name: default.t1_n80 @@ -3674,31 +3257,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -3709,14 +3283,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n80 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n80 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n80 name: default.t1_n80 @@ -3877,31 +3445,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key,val bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t2_n49 - numFiles 2 - numRows 6 - 
rawDataSize 24 - serialization.ddl struct t2_n49 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key,val @@ -3912,14 +3471,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t2_n49 - numFiles 2 - numRows 6 - rawDataSize 24 - serialization.ddl struct t2_n49 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t2_n49 name: default.t2_n49 @@ -3951,8 +3504,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -3960,14 +3511,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n18 - numFiles 1 - numRows 4 - rawDataSize 12 - serialization.ddl struct outputtbl1_n18 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 16 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n18 TotalFiles: 1 @@ -4038,8 +3583,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -4047,14 +3590,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n18 - numFiles 1 - numRows 4 - rawDataSize 12 - serialization.ddl struct outputtbl1_n18 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 16 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n18 @@ -4154,8 +3691,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true","key3":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,key3,cnt @@ -4163,14 +3698,8 @@ STAGE PLANS: columns.types int:int:string:int #### A masked pattern was here #### name default.outputtbl4_n2 - numFiles 2 - numRows 6 - rawDataSize 48 - serialization.ddl struct outputtbl4_n2 { i32 key1, i32 key2, string key3, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 54 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl4_n2 TotalFiles: 1 @@ 
-4206,31 +3735,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key,val bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t2_n49 - numFiles 2 - numRows 6 - rawDataSize 24 - serialization.ddl struct t2_n49 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key,val @@ -4241,14 +3761,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t2_n49 - numFiles 2 - numRows 6 - rawDataSize 24 - serialization.ddl struct t2_n49 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t2_n49 name: default.t2_n49 @@ -4300,8 +3814,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true","key3":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,key3,cnt @@ -4309,14 +3821,8 @@ STAGE PLANS: columns.types int:int:string:int #### A masked pattern was here #### name default.outputtbl4_n2 - numFiles 2 - numRows 6 - rawDataSize 48 - serialization.ddl struct outputtbl4_n2 { i32 key1, i32 key2, string key3, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 54 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl4_n2 @@ -4427,8 +3933,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true","key3":"true","key4":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,key3,key4,cnt @@ -4436,14 +3940,8 @@ STAGE PLANS: columns.types int:int:string:int:int #### A masked pattern was here #### name default.outputtbl5_n2 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct outputtbl5_n2 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl5_n2 TotalFiles: 1 @@ -4479,31 +3977,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - 
COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key,val bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t2_n49 - numFiles 2 - numRows 6 - rawDataSize 24 - serialization.ddl struct t2_n49 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key,val @@ -4514,14 +4003,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t2_n49 - numFiles 2 - numRows 6 - rawDataSize 24 - serialization.ddl struct t2_n49 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t2_n49 name: default.t2_n49 @@ -4573,8 +4056,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true","key3":"true","key4":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,key3,key4,cnt @@ -4582,14 +4063,8 @@ STAGE PLANS: columns.types int:int:string:int:int #### A masked pattern was here #### name default.outputtbl5_n2 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct outputtbl5_n2 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl5_n2 @@ -4699,8 +4174,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true","key3":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,key3,cnt @@ -4708,14 +4181,8 @@ STAGE PLANS: columns.types int:int:string:int #### A masked pattern was here #### name default.outputtbl4_n2 - numFiles 2 - numRows 6 - rawDataSize 48 - serialization.ddl struct outputtbl4_n2 { i32 key1, i32 key2, string key3, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 54 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl4_n2 TotalFiles: 1 @@ -4751,31 +4218,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key,val bucketing_version 2 column.name.delimiter 
, columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t2_n49 - numFiles 2 - numRows 6 - rawDataSize 24 - serialization.ddl struct t2_n49 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key,val @@ -4786,14 +4244,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t2_n49 - numFiles 2 - numRows 6 - rawDataSize 24 - serialization.ddl struct t2_n49 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t2_n49 name: default.t2_n49 @@ -4845,8 +4297,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true","key3":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,key3,cnt @@ -4854,14 +4304,8 @@ STAGE PLANS: columns.types int:int:string:int #### A masked pattern was here #### name default.outputtbl4_n2 - numFiles 2 - numRows 6 - rawDataSize 48 - serialization.ddl struct outputtbl4_n2 { i32 key1, i32 key2, string key3, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 54 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl4_n2 @@ -4978,8 +4422,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true","key3":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,key3,cnt @@ -4987,14 +4429,8 @@ STAGE PLANS: columns.types int:int:string:int #### A masked pattern was here #### name default.outputtbl4_n2 - numFiles 2 - numRows 6 - rawDataSize 48 - serialization.ddl struct outputtbl4_n2 { i32 key1, i32 key2, string key3, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 54 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl4_n2 TotalFiles: 1 @@ -5030,31 +4466,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key,val bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t2_n49 - numFiles 2 - numRows 6 - rawDataSize 24 - serialization.ddl struct t2_n49 { string key, string val} 
serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key,val @@ -5065,14 +4492,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t2_n49 - numFiles 2 - numRows 6 - rawDataSize 24 - serialization.ddl struct t2_n49 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t2_n49 name: default.t2_n49 @@ -5124,8 +4545,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true","key3":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,key3,cnt @@ -5133,14 +4552,8 @@ STAGE PLANS: columns.types int:int:string:int #### A masked pattern was here #### name default.outputtbl4_n2 - numFiles 2 - numRows 6 - rawDataSize 48 - serialization.ddl struct outputtbl4_n2 { i32 key1, i32 key2, string key3, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 54 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl4_n2 diff --git a/ql/src/test/results/clientpositive/llap/groupby_sort_6.q.out b/ql/src/test/results/clientpositive/llap/groupby_sort_6.q.out index 68f196cd03..08d94dc5c5 100644 --- a/ql/src/test/results/clientpositive/llap/groupby_sort_6.q.out +++ b/ql/src/test/results/clientpositive/llap/groupby_sort_6.q.out @@ -106,8 +106,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -115,14 +113,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n15 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct outputtbl1_n15 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n15 TotalFiles: 1 @@ -193,8 +185,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -202,14 +192,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n15 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct outputtbl1_n15 { i32 key, i32 cnt} serialization.format 1 
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n15 @@ -344,8 +328,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -353,14 +335,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n15 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct outputtbl1_n15 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n15 TotalFiles: 1 @@ -431,8 +407,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -440,14 +414,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n15 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct outputtbl1_n15 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n15 @@ -556,29 +524,20 @@ STAGE PLANS: partition values: ds 2 properties: - bucket_count -1 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n61 - numFiles 1 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct t1_n61 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,val @@ -588,10 +547,8 @@ STAGE PLANS: name default.t1_n61 partition_columns ds partition_columns.types string - serialization.ddl struct t1_n61 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n61 name: default.t1_n61 @@ -623,8 +580,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -632,14 +587,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n15 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct 
outputtbl1_n15 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n15 TotalFiles: 1 @@ -710,8 +659,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -719,14 +666,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n15 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct outputtbl1_n15 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n15 diff --git a/ql/src/test/results/clientpositive/llap/groupby_sort_skew_1_23.q.out b/ql/src/test/results/clientpositive/llap/groupby_sort_skew_1_23.q.out index 5b9ee467a0..0e528ffa37 100644 --- a/ql/src/test/results/clientpositive/llap/groupby_sort_skew_1_23.q.out +++ b/ql/src/test/results/clientpositive/llap/groupby_sort_skew_1_23.q.out @@ -95,8 +95,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -104,14 +102,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n13 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct outputtbl1_n13 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n13 TotalFiles: 1 @@ -147,31 +139,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -182,14 +165,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 
-#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n56 name: default.t1_n56 @@ -241,8 +218,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -250,14 +225,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n13 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct outputtbl1_n13 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n13 @@ -374,31 +343,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -409,14 +369,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n56 name: default.t1_n56 @@ -469,8 +423,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,cnt @@ -478,14 +430,8 @@ STAGE PLANS: columns.types int:string:int #### A masked pattern was here #### name default.outputtbl2_n3 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct outputtbl2_n3 { i32 key1, string key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl2_n3 TotalFiles: 1 @@ -556,8 +502,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true"}} - bucket_count -1 
bucketing_version 2 column.name.delimiter , columns key1,key2,cnt @@ -565,14 +509,8 @@ STAGE PLANS: columns.types int:string:int #### A masked pattern was here #### name default.outputtbl2_n3 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct outputtbl2_n3 { i32 key1, string key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl2_n3 @@ -674,8 +612,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -683,14 +619,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n13 - numFiles 1 - numRows 5 - rawDataSize 15 - serialization.ddl struct outputtbl1_n13 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 20 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n13 TotalFiles: 1 @@ -726,31 +656,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -761,14 +682,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n56 name: default.t1_n56 @@ -820,8 +735,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -829,14 +742,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n13 - numFiles 1 - numRows 5 - rawDataSize 15 - serialization.ddl struct outputtbl1_n13 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 20 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n13 @@ -936,8 +843,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -945,14 +850,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n13 - numFiles 1 - numRows 5 - rawDataSize 15 - serialization.ddl struct outputtbl1_n13 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 20 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n13 TotalFiles: 1 @@ -988,31 +887,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -1023,14 +913,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n56 name: default.t1_n56 @@ -1082,8 +966,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -1091,14 +973,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n13 - numFiles 1 - numRows 5 - rawDataSize 15 - serialization.ddl struct outputtbl1_n13 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 20 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n13 @@ -1206,8 +1082,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,cnt @@ -1215,14 +1089,8 @@ STAGE 
PLANS: columns.types int:int:int #### A masked pattern was here #### name default.outputtbl3_n1 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct outputtbl3_n1 { i32 key1, i32 key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl3_n1 TotalFiles: 1 @@ -1258,31 +1126,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -1293,14 +1152,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n56 name: default.t1_n56 @@ -1352,8 +1205,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,cnt @@ -1361,14 +1212,8 @@ STAGE PLANS: columns.types int:int:int #### A masked pattern was here #### name default.outputtbl3_n1 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct outputtbl3_n1 { i32 key1, i32 key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl3_n1 @@ -1486,31 +1331,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: 
org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -1521,14 +1357,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n56 name: default.t1_n56 @@ -1581,8 +1411,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true","key3":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,key3,cnt @@ -1590,14 +1418,8 @@ STAGE PLANS: columns.types int:int:string:int #### A masked pattern was here #### name default.outputtbl4_n1 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct outputtbl4_n1 { i32 key1, i32 key2, string key3, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl4_n1 TotalFiles: 1 @@ -1668,8 +1490,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true","key3":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,key3,cnt @@ -1677,14 +1497,8 @@ STAGE PLANS: columns.types int:int:string:int #### A masked pattern was here #### name default.outputtbl4_n1 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct outputtbl4_n1 { i32 key1, i32 key2, string key3, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl4_n1 @@ -1796,31 +1610,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -1831,14 +1636,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was 
here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n56 name: default.t1_n56 @@ -1891,8 +1690,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,cnt @@ -1900,14 +1697,8 @@ STAGE PLANS: columns.types int:int:int #### A masked pattern was here #### name default.outputtbl3_n1 - numFiles 1 - numRows 5 - rawDataSize 25 - serialization.ddl struct outputtbl3_n1 { i32 key1, i32 key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl3_n1 TotalFiles: 1 @@ -1978,8 +1769,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,cnt @@ -1987,14 +1776,8 @@ STAGE PLANS: columns.types int:int:int #### A masked pattern was here #### name default.outputtbl3_n1 - numFiles 1 - numRows 5 - rawDataSize 25 - serialization.ddl struct outputtbl3_n1 { i32 key1, i32 key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl3_n1 @@ -2118,31 +1901,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -2153,14 +1927,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n56 name: default.t1_n56 @@ -2213,8 +1981,6 @@ 
STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -2222,14 +1988,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n13 - numFiles 1 - numRows 5 - rawDataSize 15 - serialization.ddl struct outputtbl1_n13 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 20 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n13 TotalFiles: 1 @@ -2300,8 +2060,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -2309,14 +2067,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n13 - numFiles 1 - numRows 5 - rawDataSize 15 - serialization.ddl struct outputtbl1_n13 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 20 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n13 @@ -2434,8 +2186,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -2443,14 +2193,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n13 - numFiles 2 - numRows 5 - rawDataSize 17 - serialization.ddl struct outputtbl1_n13 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 22 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n13 TotalFiles: 1 @@ -2486,31 +2230,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -2521,14 +2256,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - 
numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n56 name: default.t1_n56 @@ -2566,8 +2295,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -2575,14 +2302,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n13 - numFiles 2 - numRows 5 - rawDataSize 17 - serialization.ddl struct outputtbl1_n13 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 22 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n13 TotalFiles: 1 @@ -2618,31 +2339,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -2653,14 +2365,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n56 name: default.t1_n56 @@ -2714,8 +2420,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -2723,14 +2427,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n13 - numFiles 2 - numRows 5 - rawDataSize 17 - serialization.ddl struct outputtbl1_n13 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 22 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n13 @@ -2858,8 +2556,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -2867,14 +2563,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n13 - numFiles 2 - numRows 10 - rawDataSize 30 - serialization.ddl struct outputtbl1_n13 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 40 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n13 TotalFiles: 1 @@ -2910,31 +2600,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -2945,14 +2626,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n56 name: default.t1_n56 @@ -2997,31 +2672,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -3032,14 +2698,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### 
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n56 name: default.t1_n56 @@ -3129,8 +2789,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -3138,14 +2796,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n13 - numFiles 2 - numRows 10 - rawDataSize 30 - serialization.ddl struct outputtbl1_n13 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 40 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n13 TotalFiles: 1 @@ -3185,8 +2837,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -3194,14 +2844,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n13 - numFiles 2 - numRows 10 - rawDataSize 30 - serialization.ddl struct outputtbl1_n13 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 40 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n13 @@ -3322,31 +2966,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -3357,14 +2992,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n56 name: default.t1_n56 @@ -3411,8 +3040,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -3420,14 +3047,8 
@@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n13 - numFiles 3 - numRows 10 - rawDataSize 32 - serialization.ddl struct outputtbl1_n13 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 42 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n13 TotalFiles: 1 @@ -3461,31 +3082,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -3496,14 +3108,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n56 name: default.t1_n56 @@ -3555,8 +3161,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -3564,14 +3168,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n13 - numFiles 3 - numRows 10 - rawDataSize 32 - serialization.ddl struct outputtbl1_n13 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 42 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n13 @@ -3697,31 +3295,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -3732,14 +3321,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n56 name: default.t1_n56 @@ -3785,31 +3368,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -3820,14 +3394,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t1_n56 - numFiles 1 - numRows 6 - rawDataSize 24 - serialization.ddl struct t1_n56 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_n56 name: default.t1_n56 @@ -4010,31 +3578,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key,val bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t2_n34 - numFiles 2 - numRows 6 - rawDataSize 24 - serialization.ddl struct t2_n34 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key,val @@ -4045,14 +3604,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t2_n34 - numFiles 2 - numRows 6 - rawDataSize 24 - serialization.ddl struct t2_n34 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### 
A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t2_n34 name: default.t2_n34 @@ -4105,8 +3658,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -4114,14 +3665,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n13 - numFiles 1 - numRows 4 - rawDataSize 12 - serialization.ddl struct outputtbl1_n13 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 16 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n13 TotalFiles: 1 @@ -4192,8 +3737,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,cnt @@ -4201,14 +3744,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.outputtbl1_n13 - numFiles 1 - numRows 4 - rawDataSize 12 - serialization.ddl struct outputtbl1_n13 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 16 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1_n13 @@ -4308,8 +3845,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true","key3":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,key3,cnt @@ -4317,14 +3852,8 @@ STAGE PLANS: columns.types int:int:string:int #### A masked pattern was here #### name default.outputtbl4_n1 - numFiles 2 - numRows 6 - rawDataSize 48 - serialization.ddl struct outputtbl4_n1 { i32 key1, i32 key2, string key3, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 54 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl4_n1 TotalFiles: 1 @@ -4360,31 +3889,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key,val bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t2_n34 - numFiles 2 - numRows 6 - rawDataSize 24 - serialization.ddl struct t2_n34 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key,val @@ -4395,14 +3915,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t2_n34 - numFiles 2 - numRows 6 - rawDataSize 24 - serialization.ddl struct t2_n34 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t2_n34 name: default.t2_n34 @@ -4454,8 +3968,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true","key3":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,key3,cnt @@ -4463,14 +3975,8 @@ STAGE PLANS: columns.types int:int:string:int #### A masked pattern was here #### name default.outputtbl4_n1 - numFiles 2 - numRows 6 - rawDataSize 48 - serialization.ddl struct outputtbl4_n1 { i32 key1, i32 key2, string key3, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 54 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl4_n1 @@ -4581,8 +4087,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true","key3":"true","key4":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,key3,key4,cnt @@ -4590,14 +4094,8 @@ STAGE PLANS: columns.types int:int:string:int:int #### A masked pattern was here #### name default.outputtbl5_n1 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct outputtbl5_n1 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl5_n1 TotalFiles: 1 @@ -4633,31 +4131,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key,val bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t2_n34 - numFiles 2 - numRows 6 - rawDataSize 24 - serialization.ddl struct t2_n34 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val":"true"}} SORTBUCKETCOLSPREFIX TRUE 
bucket_count 2 bucket_field_name key,val @@ -4668,14 +4157,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t2_n34 - numFiles 2 - numRows 6 - rawDataSize 24 - serialization.ddl struct t2_n34 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t2_n34 name: default.t2_n34 @@ -4727,8 +4210,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true","key3":"true","key4":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,key3,key4,cnt @@ -4736,14 +4217,8 @@ STAGE PLANS: columns.types int:int:string:int:int #### A masked pattern was here #### name default.outputtbl5_n1 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct outputtbl5_n1 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl5_n1 @@ -4853,8 +4328,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true","key3":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,key3,cnt @@ -4862,14 +4335,8 @@ STAGE PLANS: columns.types int:int:string:int #### A masked pattern was here #### name default.outputtbl4_n1 - numFiles 2 - numRows 6 - rawDataSize 48 - serialization.ddl struct outputtbl4_n1 { i32 key1, i32 key2, string key3, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 54 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl4_n1 TotalFiles: 1 @@ -4905,31 +4372,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key,val bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t2_n34 - numFiles 2 - numRows 6 - rawDataSize 24 - serialization.ddl struct t2_n34 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key,val @@ -4940,14 +4398,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t2_n34 - numFiles 2 - numRows 6 - 
rawDataSize 24 - serialization.ddl struct t2_n34 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t2_n34 name: default.t2_n34 @@ -4999,8 +4451,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true","key3":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,key3,cnt @@ -5008,14 +4458,8 @@ STAGE PLANS: columns.types int:int:string:int #### A masked pattern was here #### name default.outputtbl4_n1 - numFiles 2 - numRows 6 - rawDataSize 48 - serialization.ddl struct outputtbl4_n1 { i32 key1, i32 key2, string key3, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 54 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl4_n1 @@ -5132,8 +4576,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true","key3":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,key3,cnt @@ -5141,14 +4583,8 @@ STAGE PLANS: columns.types int:int:string:int #### A masked pattern was here #### name default.outputtbl4_n1 - numFiles 2 - numRows 6 - rawDataSize 48 - serialization.ddl struct outputtbl4_n1 { i32 key1, i32 key2, string key3, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 54 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl4_n1 TotalFiles: 1 @@ -5184,31 +4620,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key,val bucketing_version 2 column.name.delimiter , columns key,val - columns.comments columns.types string:string #### A masked pattern was here #### name default.t2_n34 - numFiles 2 - numRows 6 - rawDataSize 24 - serialization.ddl struct t2_n34 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key,val @@ -5219,14 +4646,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.t2_n34 - numFiles 2 - numRows 6 - rawDataSize 24 - serialization.ddl struct t2_n34 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t2_n34 name: default.t2_n34 @@ -5278,8 +4699,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cnt":"true","key1":"true","key2":"true","key3":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,key2,key3,cnt @@ -5287,14 +4706,8 @@ STAGE PLANS: columns.types int:int:string:int #### A masked pattern was here #### name default.outputtbl4_n1 - numFiles 2 - numRows 6 - rawDataSize 48 - serialization.ddl struct outputtbl4_n1 { i32 key1, i32 key2, string key3, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 54 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl4_n1 diff --git a/ql/src/test/results/clientpositive/llap/infer_bucket_sort_num_buckets.q.out b/ql/src/test/results/clientpositive/llap/infer_bucket_sort_num_buckets.q.out index 6deaafcb57..bab2ac51a9 100644 --- a/ql/src/test/results/clientpositive/llap/infer_bucket_sort_num_buckets.q.out +++ b/ql/src/test/results/clientpositive/llap/infer_bucket_sort_num_buckets.q.out @@ -80,30 +80,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -113,10 +103,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -129,30 +117,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count 
-1 bucketing_version 2 column.name.delimiter , columns key,value @@ -162,10 +140,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -272,7 +248,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -282,10 +257,8 @@ STAGE PLANS: name default.test_table_n0 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct test_table_n0 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table_n0 TotalFiles: 1 @@ -307,7 +280,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -317,10 +289,8 @@ STAGE PLANS: name default.test_table_n0 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct test_table_n0 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table_n0 diff --git a/ql/src/test/results/clientpositive/llap/input23.q.out b/ql/src/test/results/clientpositive/llap/input23.q.out index e0799d4d07..5600e9fbac 100644 --- a/ql/src/test/results/clientpositive/llap/input23.q.out +++ b/ql/src/test/results/clientpositive/llap/input23.q.out @@ -65,30 +65,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -98,10 +88,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart diff --git a/ql/src/test/results/clientpositive/llap/input_part1.q.out 
b/ql/src/test/results/clientpositive/llap/input_part1.q.out index eb74be644c..acd0ac6af2 100644 --- a/ql/src/test/results/clientpositive/llap/input_part1.q.out +++ b/ql/src/test/results/clientpositive/llap/input_part1.q.out @@ -64,8 +64,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"ds":"true","hr":"true","key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,hr,ds @@ -73,14 +71,8 @@ STAGE PLANS: columns.types int:string:string:string #### A masked pattern was here #### name default.dest1_n45 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest1_n45 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1_n45 TotalFiles: 1 @@ -119,30 +111,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -152,10 +134,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -207,8 +187,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"ds":"true","hr":"true","key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,hr,ds @@ -216,14 +194,8 @@ STAGE PLANS: columns.types int:string:string:string #### A masked pattern was here #### name default.dest1_n45 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest1_n45 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1_n45 diff --git a/ql/src/test/results/clientpositive/llap/input_part2.q.out b/ql/src/test/results/clientpositive/llap/input_part2.q.out index 3a5dde72f8..cb411ca417 100644 --- a/ql/src/test/results/clientpositive/llap/input_part2.q.out +++ 
b/ql/src/test/results/clientpositive/llap/input_part2.q.out @@ -77,8 +77,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"ds":"true","hr":"true","key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,hr,ds @@ -86,14 +84,8 @@ STAGE PLANS: columns.types int:string:string:string #### A masked pattern was here #### name default.dest1_n84 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest1_n84 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1_n84 TotalFiles: 1 @@ -138,8 +130,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"ds":"true","hr":"true","key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,hr,ds @@ -147,14 +137,8 @@ STAGE PLANS: columns.types int:string:string:string #### A masked pattern was here #### name default.dest2_n20 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest2_n20 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest2_n20 TotalFiles: 1 @@ -193,30 +177,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -226,10 +200,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -242,30 +214,20 @@ STAGE PLANS: ds 2008-04-09 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr 
partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -275,10 +237,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -364,8 +324,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"ds":"true","hr":"true","key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,hr,ds @@ -373,14 +331,8 @@ STAGE PLANS: columns.types int:string:string:string #### A masked pattern was here #### name default.dest1_n84 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest1_n84 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1_n84 @@ -403,8 +355,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"ds":"true","hr":"true","key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,hr,ds @@ -412,14 +362,8 @@ STAGE PLANS: columns.types int:string:string:string #### A masked pattern was here #### name default.dest2_n20 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest2_n20 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest2_n20 diff --git a/ql/src/test/results/clientpositive/llap/input_part7.q.out b/ql/src/test/results/clientpositive/llap/input_part7.q.out index 6734bf1554..297ff38748 100644 --- a/ql/src/test/results/clientpositive/llap/input_part7.q.out +++ b/ql/src/test/results/clientpositive/llap/input_part7.q.out @@ -78,30 +78,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -111,10 +101,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -127,30 +115,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -160,10 +138,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -212,30 +188,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -245,10 +211,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -261,30 +225,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 
properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -294,10 +248,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart diff --git a/ql/src/test/results/clientpositive/llap/join17.q.out b/ql/src/test/results/clientpositive/llap/join17.q.out index 88f4bf6d71..e432fb8019 100644 --- a/ql/src/test/results/clientpositive/llap/join17.q.out +++ b/ql/src/test/results/clientpositive/llap/join17.q.out @@ -77,30 +77,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -108,14 +98,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -158,30 +142,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was 
here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -189,14 +163,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -231,8 +199,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key1":"true","key2":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,value1,key2,value2 @@ -240,14 +206,8 @@ STAGE PLANS: columns.types int:string:int:string #### A masked pattern was here #### name default.dest1_n121 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest1_n121 { i32 key1, string value1, i32 key2, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1_n121 TotalFiles: 1 @@ -318,8 +278,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key1":"true","key2":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key1,value1,key2,value2 @@ -327,14 +285,8 @@ STAGE PLANS: columns.types int:string:int:string #### A masked pattern was here #### name default.dest1_n121 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest1_n121 { i32 key1, string value1, i32 key2, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1_n121 diff --git a/ql/src/test/results/clientpositive/llap/join26.q.out b/ql/src/test/results/clientpositive/llap/join26.q.out index 724ec9e371..71a0af2324 100644 --- a/ql/src/test/results/clientpositive/llap/join26.q.out +++ b/ql/src/test/results/clientpositive/llap/join26.q.out @@ -103,30 +103,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types 
string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -134,14 +124,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -183,30 +167,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src1 - numFiles 1 - numRows 25 - rawDataSize 191 - serialization.ddl struct src1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -214,14 +188,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src1 - numFiles 1 - numRows 25 - rawDataSize 191 - serialization.ddl struct src1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src1 name: default.src1 @@ -270,8 +238,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -279,14 +245,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.dest_j1_n10 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest_j1_n10 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1_n10 TotalFiles: 1 
@@ -325,30 +285,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -358,10 +308,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -413,8 +361,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -422,14 +368,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.dest_j1_n10 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest_j1_n10 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1_n10 diff --git a/ql/src/test/results/clientpositive/llap/join32.q.out b/ql/src/test/results/clientpositive/llap/join32.q.out index 24b3e118dd..20db9a3bbd 100644 --- a/ql/src/test/results/clientpositive/llap/join32.q.out +++ b/ql/src/test/results/clientpositive/llap/join32.q.out @@ -103,30 +103,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns 
key,value @@ -134,14 +124,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -184,30 +168,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src1 - numFiles 1 - numRows 25 - rawDataSize 191 - serialization.ddl struct src1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -215,14 +189,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src1 - numFiles 1 - numRows 25 - rawDataSize 191 - serialization.ddl struct src1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src1 name: default.src1 @@ -271,8 +239,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -280,14 +246,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.dest_j1_n12 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest_j1_n12 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1_n12 TotalFiles: 1 @@ -326,30 +286,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -359,10 +309,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -414,8 +362,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -423,14 +369,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.dest_j1_n12 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest_j1_n12 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1_n12 diff --git a/ql/src/test/results/clientpositive/llap/join32_lessSize.q.out b/ql/src/test/results/clientpositive/llap/join32_lessSize.q.out index c7b8bf69e4..106050f45f 100644 --- a/ql/src/test/results/clientpositive/llap/join32_lessSize.q.out +++ b/ql/src/test/results/clientpositive/llap/join32_lessSize.q.out @@ -111,30 +111,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -142,14 +132,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -192,30 +176,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat 
properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src1 - numFiles 1 - numRows 25 - rawDataSize 191 - serialization.ddl struct src1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -223,14 +197,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src1 - numFiles 1 - numRows 25 - rawDataSize 191 - serialization.ddl struct src1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src1 name: default.src1 @@ -279,8 +247,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -288,14 +254,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.dest_j1_n21 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest_j1_n21 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1_n21 TotalFiles: 1 @@ -334,30 +294,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -367,10 +317,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: 
default.srcpart name: default.srcpart @@ -422,8 +370,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -431,14 +377,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.dest_j1_n21 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest_j1_n21 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1_n21 @@ -667,30 +607,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src1 - numFiles 1 - numRows 25 - rawDataSize 191 - serialization.ddl struct src1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -698,14 +628,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src1 - numFiles 1 - numRows 25 - rawDataSize 191 - serialization.ddl struct src1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src1 name: default.src1 @@ -748,30 +672,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src1 - numFiles 1 - numRows 25 - rawDataSize 191 - serialization.ddl struct src1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -779,14 +693,8 @@ STAGE PLANS: 
columns.types string:string #### A masked pattern was here #### name default.src1 - numFiles 1 - numRows 25 - rawDataSize 191 - serialization.ddl struct src1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src1 name: default.src1 @@ -841,30 +749,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -872,14 +770,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -928,8 +820,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -937,14 +827,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.dest_j1_n21 - numFiles 1 - numRows 85 - rawDataSize 1600 - serialization.ddl struct dest_j1_n21 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 1685 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1_n21 TotalFiles: 1 @@ -980,30 +864,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here 
#### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1011,14 +885,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -1070,8 +938,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -1079,14 +945,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.dest_j1_n21 - numFiles 1 - numRows 85 - rawDataSize 1600 - serialization.ddl struct dest_j1_n21 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 1685 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1_n21 @@ -1311,30 +1171,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1342,14 +1192,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -1392,30 +1236,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 
column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src1 - numFiles 1 - numRows 25 - rawDataSize 191 - serialization.ddl struct src1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1423,14 +1257,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src1 - numFiles 1 - numRows 25 - rawDataSize 191 - serialization.ddl struct src1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src1 name: default.src1 @@ -1479,8 +1307,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -1488,14 +1314,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.dest_j2_n1 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest_j2_n1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j2_n1 TotalFiles: 1 @@ -1534,30 +1354,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1567,10 +1377,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -1622,8 +1430,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output 
format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -1631,14 +1437,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.dest_j2_n1 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest_j2_n1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j2_n1 @@ -1853,30 +1653,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src1 - numFiles 1 - numRows 25 - rawDataSize 191 - serialization.ddl struct src1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1884,14 +1674,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src1 - numFiles 1 - numRows 25 - rawDataSize 191 - serialization.ddl struct src1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src1 name: default.src1 @@ -1933,30 +1717,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1964,14 +1738,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - 
serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -2020,8 +1788,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -2029,14 +1795,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.dest_j2_n1 - numFiles 1 - numRows 85 - rawDataSize 1600 - serialization.ddl struct dest_j2_n1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 1685 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j2_n1 TotalFiles: 1 @@ -2075,30 +1835,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2108,10 +1858,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -2187,8 +1935,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -2196,14 +1942,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.dest_j2_n1 - numFiles 1 - numRows 85 - rawDataSize 1600 - serialization.ddl struct dest_j2_n1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 1685 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j2_n1 diff --git a/ql/src/test/results/clientpositive/llap/join33.q.out b/ql/src/test/results/clientpositive/llap/join33.q.out index 530e4a5e30..bfbb4ba310 100644 --- 
a/ql/src/test/results/clientpositive/llap/join33.q.out +++ b/ql/src/test/results/clientpositive/llap/join33.q.out @@ -103,30 +103,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -134,14 +124,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -184,30 +168,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src1 - numFiles 1 - numRows 25 - rawDataSize 191 - serialization.ddl struct src1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -215,14 +189,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src1 - numFiles 1 - numRows 25 - rawDataSize 191 - serialization.ddl struct src1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src1 name: default.src1 @@ -271,8 +239,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , 
columns key,value,val2 @@ -280,14 +246,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.dest_j1_n7 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest_j1_n7 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1_n7 TotalFiles: 1 @@ -326,30 +286,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -359,10 +309,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -414,8 +362,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -423,14 +369,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.dest_j1_n7 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest_j1_n7 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1_n7 diff --git a/ql/src/test/results/clientpositive/llap/join34.q.out b/ql/src/test/results/clientpositive/llap/join34.q.out index 0802efe2ac..a58ee4af3d 100644 --- a/ql/src/test/results/clientpositive/llap/join34.q.out +++ b/ql/src/test/results/clientpositive/llap/join34.q.out @@ -102,8 +102,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -111,14 +109,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.dest_j1_n1 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest_j1_n1 { 
string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1_n1 TotalFiles: 1 @@ -154,30 +146,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -185,14 +167,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -241,8 +217,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -250,14 +224,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.dest_j1_n1 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest_j1_n1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1_n1 TotalFiles: 1 @@ -293,30 +261,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -324,14 +282,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -385,30 +337,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src1 - numFiles 1 - numRows 25 - rawDataSize 191 - serialization.ddl struct src1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -416,14 +358,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src1 - numFiles 1 - numRows 25 - rawDataSize 191 - serialization.ddl struct src1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src1 name: default.src1 @@ -477,8 +413,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -486,14 +420,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.dest_j1_n1 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest_j1_n1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1_n1 diff --git a/ql/src/test/results/clientpositive/llap/join35.q.out b/ql/src/test/results/clientpositive/llap/join35.q.out index fb4a512e0d..cd435a606c 100644 --- a/ql/src/test/results/clientpositive/llap/join35.q.out +++ b/ql/src/test/results/clientpositive/llap/join35.q.out @@ -101,30 +101,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - 
COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -132,14 +122,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -185,30 +169,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -216,14 +190,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -277,30 +245,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src1 - numFiles 1 - numRows 25 - rawDataSize 191 - serialization.ddl struct src1 { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -308,14 +266,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src1 - numFiles 1 - numRows 25 - rawDataSize 191 - serialization.ddl struct src1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src1 name: default.src1 @@ -359,8 +311,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -368,14 +318,8 @@ STAGE PLANS: columns.types string:string:int #### A masked pattern was here #### name default.dest_j1_n24 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest_j1_n24 { string key, string value, i32 val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1_n24 TotalFiles: 1 @@ -471,8 +415,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -480,14 +422,8 @@ STAGE PLANS: columns.types string:string:int #### A masked pattern was here #### name default.dest_j1_n24 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest_j1_n24 { string key, string value, i32 val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1_n24 TotalFiles: 1 @@ -527,8 +463,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","val2":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,val2 @@ -536,14 +470,8 @@ STAGE PLANS: columns.types string:string:int #### A masked pattern was here #### name default.dest_j1_n24 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest_j1_n24 { string key, string value, i32 val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1_n24 diff --git a/ql/src/test/results/clientpositive/llap/join9.q.out 
b/ql/src/test/results/clientpositive/llap/join9.q.out index d28d5fe047..2ce0126eb0 100644 --- a/ql/src/test/results/clientpositive/llap/join9.q.out +++ b/ql/src/test/results/clientpositive/llap/join9.q.out @@ -83,30 +83,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -116,10 +106,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -162,30 +150,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -193,14 +171,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -235,8 +207,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -244,14 +214,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name 
default.dest1_n39 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest1_n39 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1_n39 TotalFiles: 1 @@ -322,8 +286,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -331,14 +293,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.dest1_n39 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest1_n39 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1_n39 diff --git a/ql/src/test/results/clientpositive/llap/join_filters_overlap.q.out b/ql/src/test/results/clientpositive/llap/join_filters_overlap.q.out index dd0f0337e9..cb87a53bb3 100644 --- a/ql/src/test/results/clientpositive/llap/join_filters_overlap.q.out +++ b/ql/src/test/results/clientpositive/llap/join_filters_overlap.q.out @@ -91,30 +91,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:int #### A masked pattern was here #### name default.a_n4 - numFiles 1 - numRows 3 - rawDataSize 18 - serialization.ddl struct a_n4 { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 21 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -122,14 +112,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.a_n4 - numFiles 1 - numRows 3 - rawDataSize 18 - serialization.ddl struct a_n4 { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 21 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.a_n4 name: default.a_n4 @@ -172,30 +156,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:int #### A masked pattern was here #### name default.a_n4 - numFiles 1 - numRows 3 - rawDataSize 18 - serialization.ddl struct a_n4 { i32 key, i32 value} serialization.format 1 
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 21 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -203,14 +177,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.a_n4 - numFiles 1 - numRows 3 - rawDataSize 18 - serialization.ddl struct a_n4 { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 21 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.a_n4 name: default.a_n4 @@ -406,30 +374,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:int #### A masked pattern was here #### name default.a_n4 - numFiles 1 - numRows 3 - rawDataSize 18 - serialization.ddl struct a_n4 { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 21 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -437,14 +395,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.a_n4 - numFiles 1 - numRows 3 - rawDataSize 18 - serialization.ddl struct a_n4 { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 21 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.a_n4 name: default.a_n4 @@ -482,30 +434,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:int #### A masked pattern was here #### name default.a_n4 - numFiles 1 - numRows 3 - rawDataSize 18 - serialization.ddl struct a_n4 { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 21 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -513,14 +455,8 @@ STAGE PLANS: columns.types int:int #### A 
masked pattern was here #### name default.a_n4 - numFiles 1 - numRows 3 - rawDataSize 18 - serialization.ddl struct a_n4 { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 21 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.a_n4 name: default.a_n4 @@ -716,30 +652,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:int #### A masked pattern was here #### name default.a_n4 - numFiles 1 - numRows 3 - rawDataSize 18 - serialization.ddl struct a_n4 { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 21 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -747,14 +673,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.a_n4 - numFiles 1 - numRows 3 - rawDataSize 18 - serialization.ddl struct a_n4 { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 21 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.a_n4 name: default.a_n4 @@ -792,30 +712,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:int #### A masked pattern was here #### name default.a_n4 - numFiles 1 - numRows 3 - rawDataSize 18 - serialization.ddl struct a_n4 { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 21 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -823,14 +733,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.a_n4 - numFiles 1 - numRows 3 - rawDataSize 18 - serialization.ddl struct a_n4 { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 21 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.a_n4 name: default.a_n4 @@ -1043,30 +947,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat 
properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:int #### A masked pattern was here #### name default.a_n4 - numFiles 1 - numRows 3 - rawDataSize 18 - serialization.ddl struct a_n4 { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 21 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1074,14 +968,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.a_n4 - numFiles 1 - numRows 3 - rawDataSize 18 - serialization.ddl struct a_n4 { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 21 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.a_n4 name: default.a_n4 @@ -1119,30 +1007,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:int #### A masked pattern was here #### name default.a_n4 - numFiles 1 - numRows 3 - rawDataSize 18 - serialization.ddl struct a_n4 { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 21 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1150,14 +1028,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.a_n4 - numFiles 1 - numRows 3 - rawDataSize 18 - serialization.ddl struct a_n4 { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 21 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.a_n4 name: default.a_n4 @@ -1392,30 +1264,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:int #### A masked pattern was here #### name default.a_n4 - numFiles 1 - numRows 3 - rawDataSize 18 - serialization.ddl struct a_n4 { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 21 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1423,14 +1285,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.a_n4 - numFiles 1 - numRows 3 - rawDataSize 18 - serialization.ddl struct a_n4 { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 21 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.a_n4 name: default.a_n4 @@ -1473,30 +1329,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:int #### A masked pattern was here #### name default.a_n4 - numFiles 1 - numRows 3 - rawDataSize 18 - serialization.ddl struct a_n4 { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 21 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1504,14 +1350,8 @@ STAGE PLANS: columns.types int:int #### A masked pattern was here #### name default.a_n4 - numFiles 1 - numRows 3 - rawDataSize 18 - serialization.ddl struct a_n4 { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 21 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.a_n4 name: default.a_n4 diff --git a/ql/src/test/results/clientpositive/llap/list_bucket_dml_1.q.out b/ql/src/test/results/clientpositive/llap/list_bucket_dml_1.q.out index 189335a3ad..fa0d8f0ed8 100644 --- a/ql/src/test/results/clientpositive/llap/list_bucket_dml_1.q.out +++ b/ql/src/test/results/clientpositive/llap/list_bucket_dml_1.q.out @@ -66,7 +66,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -76,10 +75,8 @@ STAGE PLANS: name default.list_bucketing_dynamic_part_n0 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_dynamic_part_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.list_bucketing_dynamic_part_n0 TotalFiles: 1 @@ -121,30 +118,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter 
, columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -154,10 +141,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -170,30 +155,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -203,10 +178,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -267,7 +240,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -277,10 +249,8 @@ STAGE PLANS: name default.list_bucketing_dynamic_part_n0 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_dynamic_part_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.list_bucketing_dynamic_part_n0 @@ -456,30 +426,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name 
default.list_bucketing_dynamic_part_n0 - numFiles 2 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct list_bucketing_dynamic_part_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -489,10 +449,8 @@ STAGE PLANS: name default.list_bucketing_dynamic_part_n0 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_dynamic_part_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.list_bucketing_dynamic_part_n0 name: default.list_bucketing_dynamic_part_n0 diff --git a/ql/src/test/results/clientpositive/llap/list_bucket_dml_10.q.out b/ql/src/test/results/clientpositive/llap/list_bucket_dml_10.q.out index 3ce63ffa0a..a22819003b 100644 --- a/ql/src/test/results/clientpositive/llap/list_bucket_dml_10.q.out +++ b/ql/src/test/results/clientpositive/llap/list_bucket_dml_10.q.out @@ -65,7 +65,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -75,10 +74,8 @@ STAGE PLANS: name default.list_bucketing_static_part partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_static_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_static_part TotalFiles: 1 @@ -117,30 +114,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -148,14 +135,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -215,7 +196,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -225,10 +205,8 @@ STAGE PLANS: name default.list_bucketing_static_part partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_static_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_static_part diff --git a/ql/src/test/results/clientpositive/llap/list_bucket_dml_11.q.out b/ql/src/test/results/clientpositive/llap/list_bucket_dml_11.q.out index 3cfe511333..4fe1310685 100644 --- a/ql/src/test/results/clientpositive/llap/list_bucket_dml_11.q.out +++ b/ql/src/test/results/clientpositive/llap/list_bucket_dml_11.q.out @@ -65,7 +65,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -75,10 +74,8 @@ STAGE PLANS: name default.list_bucketing_static_part_n3 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_static_part_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_static_part_n3 TotalFiles: 1 @@ -117,30 +114,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -148,14 +135,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -215,7 +196,6 @@ STAGE PLANS: input format: 
org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -225,10 +205,8 @@ STAGE PLANS: name default.list_bucketing_static_part_n3 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_static_part_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_static_part_n3 @@ -334,30 +312,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.list_bucketing_static_part_n3 - numFiles 4 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 4812 - serialization.ddl struct list_bucketing_static_part_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe - totalSize 5522 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -367,10 +335,8 @@ STAGE PLANS: name default.list_bucketing_static_part_n3 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_static_part_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_static_part_n3 name: default.list_bucketing_static_part_n3 diff --git a/ql/src/test/results/clientpositive/llap/list_bucket_dml_12.q.out b/ql/src/test/results/clientpositive/llap/list_bucket_dml_12.q.out index 297dd32b35..9b2ec89b48 100644 --- a/ql/src/test/results/clientpositive/llap/list_bucket_dml_12.q.out +++ b/ql/src/test/results/clientpositive/llap/list_bucket_dml_12.q.out @@ -65,7 +65,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns col1,col2,col3,col4,col5 @@ -75,10 +74,8 @@ STAGE PLANS: name default.list_bucketing_mul_col_n0 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_mul_col_n0 { string col1, string col2, string col3, string col4, string col5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_mul_col_n0 TotalFiles: 1 @@ -117,30 +114,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value 
- columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -148,14 +135,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -215,7 +196,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns col1,col2,col3,col4,col5 @@ -225,10 +205,8 @@ STAGE PLANS: name default.list_bucketing_mul_col_n0 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_mul_col_n0 { string col1, string col2, string col3, string col4, string col5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_mul_col_n0 @@ -342,30 +320,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"col1":"true","col2":"true","col3":"true","col4":"true","col5":"true"}} - bucket_count -1 column.name.delimiter , columns col1,col2,col3,col4,col5 - columns.comments columns.types string:string:string:string:string #### A masked pattern was here #### name default.list_bucketing_mul_col_n0 - numFiles 4 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 6312 - serialization.ddl struct list_bucketing_mul_col_n0 { string col1, string col2, string col3, string col4, string col5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe - totalSize 7094 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns col1,col2,col3,col4,col5 @@ -375,10 +343,8 @@ STAGE PLANS: name default.list_bucketing_mul_col_n0 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_mul_col_n0 { string col1, string col2, string col3, string col4, string col5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_mul_col_n0 name: 
default.list_bucketing_mul_col_n0 @@ -442,30 +408,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"col1":"true","col2":"true","col3":"true","col4":"true","col5":"true"}} - bucket_count -1 column.name.delimiter , columns col1,col2,col3,col4,col5 - columns.comments columns.types string:string:string:string:string #### A masked pattern was here #### name default.list_bucketing_mul_col_n0 - numFiles 4 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 6312 - serialization.ddl struct list_bucketing_mul_col_n0 { string col1, string col2, string col3, string col4, string col5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe - totalSize 7094 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns col1,col2,col3,col4,col5 @@ -475,10 +431,8 @@ STAGE PLANS: name default.list_bucketing_mul_col_n0 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_mul_col_n0 { string col1, string col2, string col3, string col4, string col5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_mul_col_n0 name: default.list_bucketing_mul_col_n0 diff --git a/ql/src/test/results/clientpositive/llap/list_bucket_dml_13.q.out b/ql/src/test/results/clientpositive/llap/list_bucket_dml_13.q.out index e272f2180c..ac035e3203 100644 --- a/ql/src/test/results/clientpositive/llap/list_bucket_dml_13.q.out +++ b/ql/src/test/results/clientpositive/llap/list_bucket_dml_13.q.out @@ -65,7 +65,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns col1,col2,col3,col4,col5 @@ -75,10 +74,8 @@ STAGE PLANS: name default.list_bucketing_mul_col partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_mul_col TotalFiles: 1 @@ -117,30 +114,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat 
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -148,14 +135,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -215,7 +196,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns col1,col2,col3,col4,col5 @@ -225,10 +205,8 @@ STAGE PLANS: name default.list_bucketing_mul_col partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_mul_col @@ -342,30 +320,20 @@ STAGE PLANS: ds 2008-04-08 hr 2013-01-23+18:00:99 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"col1":"true","col2":"true","col3":"true","col4":"true","col5":"true"}} - bucket_count -1 column.name.delimiter , columns col1,col2,col3,col4,col5 - columns.comments columns.types string:string:string:string:string #### A masked pattern was here #### name default.list_bucketing_mul_col - numFiles 4 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 6312 - serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe - totalSize 7094 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns col1,col2,col3,col4,col5 @@ -375,10 +343,8 @@ STAGE PLANS: name default.list_bucketing_mul_col partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_mul_col name: default.list_bucketing_mul_col diff --git a/ql/src/test/results/clientpositive/llap/list_bucket_dml_14.q.out b/ql/src/test/results/clientpositive/llap/list_bucket_dml_14.q.out index 681652d8cd..e79a6e7a58 100644 --- a/ql/src/test/results/clientpositive/llap/list_bucket_dml_14.q.out +++ b/ql/src/test/results/clientpositive/llap/list_bucket_dml_14.q.out @@ -58,8 +58,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat 
properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -67,14 +65,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.list_bucketing - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct list_bucketing { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.list_bucketing TotalFiles: 1 @@ -110,30 +102,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -141,14 +123,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -200,8 +176,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -209,14 +183,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.list_bucketing - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct list_bucketing { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.list_bucketing diff --git a/ql/src/test/results/clientpositive/llap/list_bucket_dml_2.q.out b/ql/src/test/results/clientpositive/llap/list_bucket_dml_2.q.out index bd1df7c92d..dcae664bb5 100644 --- a/ql/src/test/results/clientpositive/llap/list_bucket_dml_2.q.out +++ b/ql/src/test/results/clientpositive/llap/list_bucket_dml_2.q.out @@ -71,7 +71,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 
column.name.delimiter , columns key,value @@ -81,10 +80,8 @@ STAGE PLANS: name default.list_bucketing_static_part_n4 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_static_part_n4 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_static_part_n4 TotalFiles: 1 @@ -126,30 +123,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -159,10 +146,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -175,30 +160,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -208,10 +183,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -272,7 +245,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -282,10 +254,8 @@ STAGE PLANS: name default.list_bucketing_static_part_n4 
partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_static_part_n4 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_static_part_n4 @@ -413,30 +383,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.list_bucketing_static_part_n4 - numFiles 3 - numRows 1000 partition_columns ds/hr partition_columns.types string:string - rawDataSize 9624 - serialization.ddl struct list_bucketing_static_part_n4 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe - totalSize 10659 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -446,10 +406,8 @@ STAGE PLANS: name default.list_bucketing_static_part_n4 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_static_part_n4 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_static_part_n4 name: default.list_bucketing_static_part_n4 diff --git a/ql/src/test/results/clientpositive/llap/list_bucket_dml_3.q.out b/ql/src/test/results/clientpositive/llap/list_bucket_dml_3.q.out index ad1b340a9d..7ff817fa86 100644 --- a/ql/src/test/results/clientpositive/llap/list_bucket_dml_3.q.out +++ b/ql/src/test/results/clientpositive/llap/list_bucket_dml_3.q.out @@ -61,7 +61,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -71,10 +70,8 @@ STAGE PLANS: name default.list_bucketing_static_part_n1 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_static_part_n1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.list_bucketing_static_part_n1 TotalFiles: 1 @@ -116,11 +113,8 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart @@ -129,17 +123,14 @@ STAGE PLANS: partition_columns ds/hr partition_columns.types string:string rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### 
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -149,10 +140,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -165,11 +154,8 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart @@ -178,17 +164,14 @@ STAGE PLANS: partition_columns ds/hr partition_columns.types string:string rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -198,10 +181,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -262,7 +243,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -272,10 +252,8 @@ STAGE PLANS: name default.list_bucketing_static_part_n1 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_static_part_n1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.list_bucketing_static_part_n1 @@ -405,11 +383,8 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.list_bucketing_static_part_n1 @@ -418,17 +393,14 @@ STAGE PLANS: partition_columns ds/hr partition_columns.types string:string rawDataSize 10624 - serialization.ddl struct list_bucketing_static_part_n1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 11624 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat 
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -438,10 +410,8 @@ STAGE PLANS: name default.list_bucketing_static_part_n1 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_static_part_n1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.list_bucketing_static_part_n1 name: default.list_bucketing_static_part_n1 diff --git a/ql/src/test/results/clientpositive/llap/list_bucket_dml_4.q.out b/ql/src/test/results/clientpositive/llap/list_bucket_dml_4.q.out index 25a29beee5..68144f03f1 100644 --- a/ql/src/test/results/clientpositive/llap/list_bucket_dml_4.q.out +++ b/ql/src/test/results/clientpositive/llap/list_bucket_dml_4.q.out @@ -71,7 +71,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -81,10 +80,8 @@ STAGE PLANS: name default.list_bucketing_static_part_n2 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_static_part_n2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_static_part_n2 TotalFiles: 1 @@ -126,30 +123,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -159,10 +146,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -175,30 +160,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, 
string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -208,10 +183,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -272,7 +245,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -282,10 +254,8 @@ STAGE PLANS: name default.list_bucketing_static_part_n2 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_static_part_n2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_static_part_n2 @@ -422,7 +392,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -432,10 +401,8 @@ STAGE PLANS: name default.list_bucketing_static_part_n2 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_static_part_n2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_static_part_n2 TotalFiles: 1 @@ -477,30 +444,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -510,10 +467,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -526,30 +481,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -559,10 +504,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -623,7 +566,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -633,10 +575,8 @@ STAGE PLANS: name default.list_bucketing_static_part_n2 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_static_part_n2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_static_part_n2 @@ -764,30 +704,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.list_bucketing_static_part_n2 - numFiles 3 - numRows 1000 partition_columns ds/hr partition_columns.types string:string - rawDataSize 9624 - serialization.ddl struct list_bucketing_static_part_n2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe - totalSize 10659 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -797,10 +727,8 @@ STAGE PLANS: name default.list_bucketing_static_part_n2 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_static_part_n2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: 
default.list_bucketing_static_part_n2 name: default.list_bucketing_static_part_n2 diff --git a/ql/src/test/results/clientpositive/llap/list_bucket_dml_5.q.out b/ql/src/test/results/clientpositive/llap/list_bucket_dml_5.q.out index 7aa4b23f1f..e5cb4f95e6 100644 --- a/ql/src/test/results/clientpositive/llap/list_bucket_dml_5.q.out +++ b/ql/src/test/results/clientpositive/llap/list_bucket_dml_5.q.out @@ -66,7 +66,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -76,10 +75,8 @@ STAGE PLANS: name default.list_bucketing_dynamic_part_n1 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_dynamic_part_n1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.list_bucketing_dynamic_part_n1 TotalFiles: 1 @@ -121,30 +118,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -154,10 +141,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -170,30 +155,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -203,10 +178,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl 
struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -267,7 +240,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -277,10 +249,8 @@ STAGE PLANS: name default.list_bucketing_dynamic_part_n1 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_dynamic_part_n1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.list_bucketing_dynamic_part_n1 @@ -463,30 +433,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.list_bucketing_dynamic_part_n1 - numFiles 3 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct list_bucketing_dynamic_part_n1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -496,10 +456,8 @@ STAGE PLANS: name default.list_bucketing_dynamic_part_n1 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_dynamic_part_n1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.list_bucketing_dynamic_part_n1 name: default.list_bucketing_dynamic_part_n1 @@ -510,30 +468,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.list_bucketing_dynamic_part_n1 - numFiles 3 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct list_bucketing_dynamic_part_n1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -543,10 +491,8 @@ STAGE PLANS: name default.list_bucketing_dynamic_part_n1 partition_columns ds/hr partition_columns.types 
string:string - serialization.ddl struct list_bucketing_dynamic_part_n1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.list_bucketing_dynamic_part_n1 name: default.list_bucketing_dynamic_part_n1 diff --git a/ql/src/test/results/clientpositive/llap/list_bucket_dml_6.q.out b/ql/src/test/results/clientpositive/llap/list_bucket_dml_6.q.out index a3dccdfe80..dd1e97bec2 100644 --- a/ql/src/test/results/clientpositive/llap/list_bucket_dml_6.q.out +++ b/ql/src/test/results/clientpositive/llap/list_bucket_dml_6.q.out @@ -70,7 +70,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -80,10 +79,8 @@ STAGE PLANS: name default.list_bucketing_dynamic_part_n3 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_dynamic_part_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_dynamic_part_n3 TotalFiles: 1 @@ -125,30 +122,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -158,10 +145,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -174,30 +159,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -207,10 +182,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -271,7 +244,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -281,10 +253,8 @@ STAGE PLANS: name default.list_bucketing_dynamic_part_n3 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_dynamic_part_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_dynamic_part_n3 @@ -465,7 +435,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -475,10 +444,8 @@ STAGE PLANS: name default.list_bucketing_dynamic_part_n3 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_dynamic_part_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_dynamic_part_n3 TotalFiles: 1 @@ -520,30 +487,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -553,10 +510,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -569,30 +524,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , 
columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -602,10 +547,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -666,7 +609,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -676,10 +618,8 @@ STAGE PLANS: name default.list_bucketing_dynamic_part_n3 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_dynamic_part_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_dynamic_part_n3 @@ -854,30 +794,20 @@ STAGE PLANS: ds 2008-04-08 hr a1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.list_bucketing_dynamic_part_n3 - numFiles 1 - numRows 16 partition_columns ds/hr partition_columns.types string:string - rawDataSize 136 - serialization.ddl struct list_bucketing_dynamic_part_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe - totalSize 235 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -887,10 +817,8 @@ STAGE PLANS: name default.list_bucketing_dynamic_part_n3 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_dynamic_part_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_dynamic_part_n3 name: default.list_bucketing_dynamic_part_n3 @@ -901,30 +829,20 @@ STAGE PLANS: ds 2008-04-08 hr b1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - 
columns.comments columns.types string:string #### A masked pattern was here #### name default.list_bucketing_dynamic_part_n3 - numFiles 3 - numRows 984 partition_columns ds/hr partition_columns.types string:string - rawDataSize 9488 - serialization.ddl struct list_bucketing_dynamic_part_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe - totalSize 10495 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -934,10 +852,8 @@ STAGE PLANS: name default.list_bucketing_dynamic_part_n3 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_dynamic_part_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_dynamic_part_n3 name: default.list_bucketing_dynamic_part_n3 diff --git a/ql/src/test/results/clientpositive/llap/list_bucket_dml_7.q.out b/ql/src/test/results/clientpositive/llap/list_bucket_dml_7.q.out index b517fa046f..87cb08fe12 100644 --- a/ql/src/test/results/clientpositive/llap/list_bucket_dml_7.q.out +++ b/ql/src/test/results/clientpositive/llap/list_bucket_dml_7.q.out @@ -70,7 +70,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -80,10 +79,8 @@ STAGE PLANS: name default.list_bucketing_dynamic_part partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_dynamic_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_dynamic_part TotalFiles: 1 @@ -125,30 +122,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -158,10 +145,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -174,30 +159,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -207,10 +182,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -271,7 +244,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -281,10 +253,8 @@ STAGE PLANS: name default.list_bucketing_dynamic_part partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_dynamic_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_dynamic_part @@ -465,7 +435,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -475,10 +444,8 @@ STAGE PLANS: name default.list_bucketing_dynamic_part partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_dynamic_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_dynamic_part TotalFiles: 1 @@ -520,30 +487,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -553,10 +510,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -569,30 +524,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -602,10 +547,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -666,7 +609,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -676,10 +618,8 @@ STAGE PLANS: name default.list_bucketing_dynamic_part partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_dynamic_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_dynamic_part @@ -854,30 +794,20 @@ STAGE PLANS: ds 2008-04-08 hr a1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.list_bucketing_dynamic_part - numFiles 1 - numRows 16 partition_columns ds/hr partition_columns.types string:string - rawDataSize 136 - serialization.ddl struct list_bucketing_dynamic_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe - totalSize 235 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: 
org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -887,10 +817,8 @@ STAGE PLANS: name default.list_bucketing_dynamic_part partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_dynamic_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_dynamic_part name: default.list_bucketing_dynamic_part @@ -901,30 +829,20 @@ STAGE PLANS: ds 2008-04-08 hr b1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.list_bucketing_dynamic_part - numFiles 2 - numRows 984 partition_columns ds/hr partition_columns.types string:string - rawDataSize 9488 - serialization.ddl struct list_bucketing_dynamic_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe - totalSize 10416 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -934,10 +852,8 @@ STAGE PLANS: name default.list_bucketing_dynamic_part partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_dynamic_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_dynamic_part name: default.list_bucketing_dynamic_part diff --git a/ql/src/test/results/clientpositive/llap/list_bucket_dml_9.q.out b/ql/src/test/results/clientpositive/llap/list_bucket_dml_9.q.out index 5e6e9cc935..1938bfbf4e 100644 --- a/ql/src/test/results/clientpositive/llap/list_bucket_dml_9.q.out +++ b/ql/src/test/results/clientpositive/llap/list_bucket_dml_9.q.out @@ -71,7 +71,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -81,10 +80,8 @@ STAGE PLANS: name default.list_bucketing_static_part_n0 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_static_part_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_static_part_n0 TotalFiles: 1 @@ -126,30 +123,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types 
string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -159,10 +146,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -175,30 +160,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -208,10 +183,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -272,7 +245,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -282,10 +254,8 @@ STAGE PLANS: name default.list_bucketing_static_part_n0 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_static_part_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_static_part_n0 @@ -422,7 +392,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -432,10 +401,8 @@ STAGE PLANS: name default.list_bucketing_static_part_n0 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_static_part_n0 { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_static_part_n0 TotalFiles: 1 @@ -477,30 +444,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -510,10 +467,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -526,30 +481,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -559,10 +504,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -623,7 +566,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -633,10 +575,8 @@ STAGE PLANS: name default.list_bucketing_static_part_n0 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_static_part_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_static_part_n0 @@ -764,30 +704,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.list_bucketing_static_part_n0 - numFiles 3 - numRows 1000 partition_columns ds/hr partition_columns.types string:string - rawDataSize 9624 - serialization.ddl struct list_bucketing_static_part_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe - totalSize 10659 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -797,10 +727,8 @@ STAGE PLANS: name default.list_bucketing_static_part_n0 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct list_bucketing_static_part_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_static_part_n0 name: default.list_bucketing_static_part_n0 diff --git a/ql/src/test/results/clientpositive/llap/list_bucket_query_oneskew_2.q.out b/ql/src/test/results/clientpositive/llap/list_bucket_query_oneskew_2.q.out index a60bfe65f8..f45e9d470e 100644 --- a/ql/src/test/results/clientpositive/llap/list_bucket_query_oneskew_2.q.out +++ b/ql/src/test/results/clientpositive/llap/list_bucket_query_oneskew_2.q.out @@ -158,28 +158,21 @@ STAGE PLANS: partition values: ds 1 properties: - bucket_count -1 column.name.delimiter , columns x,y - columns.comments columns.types int:string #### A masked pattern was here #### name default.fact_daily_n5 - numFiles 2 partition_columns ds partition_columns.types string - serialization.ddl struct fact_daily_n5 { i32 x, string y} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 24 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: EXTERNAL TRUE - bucket_count -1 bucketing_version 2 column.name.delimiter , columns x,y @@ -189,10 +182,8 @@ STAGE PLANS: name default.fact_daily_n5 partition_columns ds partition_columns.types string - serialization.ddl struct fact_daily_n5 { i32 x, string y} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.fact_daily_n5 name: default.fact_daily_n5 @@ -247,28 +238,21 @@ STAGE PLANS: partition values: ds 1 properties: - bucket_count -1 column.name.delimiter , columns x,y - columns.comments columns.types int:string #### A masked pattern was here #### name default.fact_daily_n5 - numFiles 2 partition_columns ds partition_columns.types string - serialization.ddl struct fact_daily_n5 { i32 x, string y} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 24 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: EXTERNAL TRUE - bucket_count -1 bucketing_version 2 column.name.delimiter , columns x,y @@ -278,10 +262,8 @@ STAGE PLANS: name default.fact_daily_n5 partition_columns ds partition_columns.types string - serialization.ddl struct fact_daily_n5 { i32 x, string y} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.fact_daily_n5 name: default.fact_daily_n5 @@ -381,28 +363,21 @@ STAGE PLANS: partition values: ds 1 properties: - bucket_count -1 column.name.delimiter , columns x,y - columns.comments columns.types int:string #### A masked pattern was here #### name default.fact_daily_n5 - numFiles 2 partition_columns ds partition_columns.types string - serialization.ddl struct fact_daily_n5 { i32 x, string y} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 24 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: EXTERNAL TRUE - bucket_count -1 bucketing_version 2 column.name.delimiter , columns x,y @@ -412,10 +387,8 @@ STAGE PLANS: name default.fact_daily_n5 partition_columns ds partition_columns.types string - serialization.ddl struct fact_daily_n5 { i32 x, string y} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.fact_daily_n5 name: default.fact_daily_n5 @@ -543,28 +516,21 @@ STAGE PLANS: partition values: ds 1 properties: - bucket_count -1 column.name.delimiter , columns x,y - columns.comments columns.types int:string #### A masked pattern was here #### name default.fact_daily_n5 - numFiles 2 partition_columns ds partition_columns.types string - serialization.ddl struct fact_daily_n5 { i32 x, string y} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 24 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: EXTERNAL TRUE - bucket_count -1 bucketing_version 2 column.name.delimiter , columns x,y @@ -574,10 +540,8 @@ STAGE PLANS: name default.fact_daily_n5 partition_columns ds partition_columns.types string - serialization.ddl struct fact_daily_n5 { i32 x, string y} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.fact_daily_n5 name: default.fact_daily_n5 diff --git a/ql/src/test/results/clientpositive/llap/load_dyn_part8.q.out b/ql/src/test/results/clientpositive/llap/load_dyn_part8.q.out index 28e12fd289..f46a6bcce2 100644 --- a/ql/src/test/results/clientpositive/llap/load_dyn_part8.q.out +++ b/ql/src/test/results/clientpositive/llap/load_dyn_part8.q.out @@ -98,7 +98,6 @@ STAGE PLANS: 
input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -108,10 +107,8 @@ STAGE PLANS: name default.nzhang_part8_n0 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct nzhang_part8_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.nzhang_part8_n0 TotalFiles: 1 @@ -160,7 +157,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -170,10 +166,8 @@ STAGE PLANS: name default.nzhang_part8_n0 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct nzhang_part8_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.nzhang_part8_n0 TotalFiles: 1 @@ -215,30 +209,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -248,10 +232,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -264,30 +246,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count 
-1 bucketing_version 2 column.name.delimiter , columns key,value @@ -297,10 +269,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -313,30 +283,20 @@ STAGE PLANS: ds 2008-04-09 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -346,10 +306,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -362,30 +320,20 @@ STAGE PLANS: ds 2008-04-09 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -395,10 +343,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -499,7 +445,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -509,10 +454,8 @@ STAGE PLANS: name default.nzhang_part8_n0 partition_columns ds/hr partition_columns.types string:string 
- serialization.ddl struct nzhang_part8_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.nzhang_part8_n0 @@ -533,7 +476,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -543,10 +485,8 @@ STAGE PLANS: name default.nzhang_part8_n0 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct nzhang_part8_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.nzhang_part8_n0 diff --git a/ql/src/test/results/clientpositive/llap/louter_join_ppr.q.out b/ql/src/test/results/clientpositive/llap/louter_join_ppr.q.out index f39ba92a88..ac36d6c20f 100644 --- a/ql/src/test/results/clientpositive/llap/louter_join_ppr.q.out +++ b/ql/src/test/results/clientpositive/llap/louter_join_ppr.q.out @@ -82,30 +82,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -113,14 +103,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -166,30 +150,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: 
org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -199,10 +173,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -215,30 +187,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -248,10 +210,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -427,30 +387,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -460,10 +410,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -476,30 +424,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - 
columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -509,10 +447,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -556,30 +492,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -587,14 +513,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -766,30 +686,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -797,14 +707,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -850,30 +754,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -883,10 +777,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -899,30 +791,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -932,10 +814,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -1111,30 +991,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1144,10 +1014,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -1160,30 +1028,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1193,10 +1051,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -1240,30 +1096,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - 
COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1271,14 +1117,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src diff --git a/ql/src/test/results/clientpositive/llap/mapjoin_mapjoin.q.out b/ql/src/test/results/clientpositive/llap/mapjoin_mapjoin.q.out index f69a950361..405b71e217 100644 --- a/ql/src/test/results/clientpositive/llap/mapjoin_mapjoin.q.out +++ b/ql/src/test/results/clientpositive/llap/mapjoin_mapjoin.q.out @@ -117,30 +117,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -150,10 +140,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -166,30 +154,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -199,10 +177,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -215,30 +191,20 @@ STAGE PLANS: ds 2008-04-09 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -248,10 +214,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -264,30 +228,20 @@ STAGE PLANS: ds 2008-04-09 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -297,10 +251,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -345,30 +297,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src1 - numFiles 1 - numRows 25 - rawDataSize 191 - serialization.ddl struct src1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -376,14 +318,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src1 - numFiles 1 - numRows 25 - rawDataSize 191 - serialization.ddl struct src1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src1 name: default.src1 @@ -425,30 +361,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -456,14 +382,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src diff --git a/ql/src/test/results/clientpositive/llap/merge3.q.out b/ql/src/test/results/clientpositive/llap/merge3.q.out index e985f54235..141e40891e 100644 --- a/ql/src/test/results/clientpositive/llap/merge3.q.out +++ b/ql/src/test/results/clientpositive/llap/merge3.q.out @@ -143,30 +143,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.merge_src - numFiles 1 - numRows 2000 - rawDataSize 21248 - serialization.ddl struct merge_src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat 
properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -174,14 +164,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.merge_src - numFiles 1 - numRows 2000 - rawDataSize 21248 - serialization.ddl struct merge_src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.merge_src name: default.merge_src @@ -2370,7 +2354,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2380,10 +2363,8 @@ STAGE PLANS: name default.merge_src_part2 partition_columns ds partition_columns.types string - serialization.ddl struct merge_src_part2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.merge_src_part2 TotalFiles: 1 @@ -2424,30 +2405,20 @@ STAGE PLANS: partition values: ds 2008-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.merge_src_part - numFiles 1 - numRows 1000 partition_columns ds partition_columns.types string - rawDataSize 10624 - serialization.ddl struct merge_src_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2457,10 +2428,8 @@ STAGE PLANS: name default.merge_src_part partition_columns ds partition_columns.types string - serialization.ddl struct merge_src_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.merge_src_part name: default.merge_src_part @@ -2472,30 +2441,20 @@ STAGE PLANS: partition values: ds 2008-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.merge_src_part - numFiles 1 - numRows 1000 partition_columns ds partition_columns.types string - rawDataSize 10624 - serialization.ddl struct merge_src_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2505,10 +2464,8 @@ STAGE PLANS: name default.merge_src_part partition_columns ds partition_columns.types string - serialization.ddl struct merge_src_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.merge_src_part name: default.merge_src_part @@ -2568,7 +2525,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2578,10 +2534,8 @@ STAGE PLANS: name default.merge_src_part2 partition_columns ds partition_columns.types string - serialization.ddl struct merge_src_part2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.merge_src_part2 @@ -4718,30 +4672,20 @@ STAGE PLANS: partition values: ds 2008-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.merge_src_part - numFiles 1 - numRows 1000 partition_columns ds partition_columns.types string - rawDataSize 10624 - serialization.ddl struct merge_src_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -4751,10 +4695,8 @@ STAGE PLANS: name default.merge_src_part partition_columns ds partition_columns.types string - serialization.ddl struct merge_src_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.merge_src_part name: default.merge_src_part @@ -4766,30 +4708,20 @@ STAGE PLANS: partition values: ds 2008-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.merge_src_part - numFiles 1 - numRows 1000 partition_columns ds partition_columns.types string - rawDataSize 10624 - serialization.ddl struct merge_src_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value 
@@ -4799,10 +4731,8 @@ STAGE PLANS: name default.merge_src_part partition_columns ds partition_columns.types string - serialization.ddl struct merge_src_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.merge_src_part name: default.merge_src_part @@ -4829,7 +4759,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -4839,10 +4768,8 @@ STAGE PLANS: name default.merge_src_part2 partition_columns ds partition_columns.types string - serialization.ddl struct merge_src_part2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.merge_src_part2 TotalFiles: 1 @@ -4901,7 +4828,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -4911,10 +4837,8 @@ STAGE PLANS: name default.merge_src_part2 partition_columns ds partition_columns.types string - serialization.ddl struct merge_src_part2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.merge_src_part2 diff --git a/ql/src/test/results/clientpositive/llap/metadataonly1.q.out b/ql/src/test/results/clientpositive/llap/metadataonly1.q.out index 38a244dcce..cba437bdaf 100644 --- a/ql/src/test/results/clientpositive/llap/metadataonly1.q.out +++ b/ql/src/test/results/clientpositive/llap/metadataonly1.q.out @@ -172,30 +172,20 @@ STAGE PLANS: partition values: ds 1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"a":"true","b":"true"}} - bucket_count -1 column.name.delimiter , columns a,b - columns.comments columns.types int:double #### A masked pattern was here #### name default.test1_n12 - numFiles 0 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct test1_n12 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns a,b @@ -205,10 +195,8 @@ STAGE PLANS: name default.test1_n12 partition_columns ds partition_columns.types string - serialization.ddl struct test1_n12 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test1_n12 name: default.test1_n12 @@ -327,30 +315,20 @@ STAGE PLANS: partition values: ds 1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"a":"true","b":"true"}} - bucket_count -1 column.name.delimiter , columns a,b - 
columns.comments columns.types int:double #### A masked pattern was here #### name default.test1_n12 - numFiles 0 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct test1_n12 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns a,b @@ -360,10 +338,8 @@ STAGE PLANS: name default.test1_n12 partition_columns ds partition_columns.types string - serialization.ddl struct test1_n12 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test1_n12 name: default.test1_n12 @@ -482,30 +458,20 @@ STAGE PLANS: partition values: ds 1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"a":"true","b":"true"}} - bucket_count -1 column.name.delimiter , columns a,b - columns.comments columns.types int:double #### A masked pattern was here #### name default.test1_n12 - numFiles 0 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct test1_n12 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns a,b @@ -515,10 +481,8 @@ STAGE PLANS: name default.test1_n12 partition_columns ds partition_columns.types string - serialization.ddl struct test1_n12 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test1_n12 name: default.test1_n12 @@ -652,30 +616,20 @@ STAGE PLANS: partition values: ds 1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"a":"true","b":"true"}} - bucket_count -1 column.name.delimiter , columns a,b - columns.comments columns.types int:double #### A masked pattern was here #### name default.test1_n12 - numFiles 0 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct test1_n12 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns a,b @@ -685,10 +639,8 @@ STAGE PLANS: name default.test1_n12 partition_columns ds partition_columns.types string - serialization.ddl struct test1_n12 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test1_n12 name: default.test1_n12 @@ -700,30 +652,20 @@ STAGE PLANS: partition values: ds 2 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"a":"true","b":"true"}} - bucket_count -1 column.name.delimiter , columns a,b - columns.comments columns.types int:double #### A masked pattern was here #### name default.test1_n12 - numFiles 0 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct test1_n12 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns a,b @@ -733,10 +675,8 @@ STAGE PLANS: name default.test1_n12 partition_columns ds partition_columns.types string - serialization.ddl struct test1_n12 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test1_n12 name: default.test1_n12 @@ -781,30 +721,20 @@ STAGE PLANS: partition values: ds 1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"a":"true","b":"true"}} - bucket_count -1 column.name.delimiter , columns a,b - columns.comments columns.types int:double #### A masked pattern was here #### name default.test1_n12 - numFiles 0 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct test1_n12 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns a,b @@ -814,10 +744,8 @@ STAGE PLANS: name default.test1_n12 partition_columns ds partition_columns.types string - serialization.ddl struct test1_n12 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test1_n12 name: default.test1_n12 @@ -829,30 +757,20 @@ STAGE PLANS: partition values: ds 2 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"a":"true","b":"true"}} - bucket_count -1 column.name.delimiter , columns a,b - columns.comments columns.types int:double #### A masked pattern was here #### name default.test1_n12 - numFiles 0 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct test1_n12 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns a,b @@ -862,10 +780,8 @@ 
STAGE PLANS: name default.test1_n12 partition_columns ds partition_columns.types string - serialization.ddl struct test1_n12 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test1_n12 name: default.test1_n12 @@ -1091,30 +1007,20 @@ STAGE PLANS: ds 1 hr 1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"a":"true","b":"true"}} - bucket_count -1 column.name.delimiter , columns a,b - columns.comments columns.types int:double #### A masked pattern was here #### name default.test2_n8 - numFiles 0 - numRows 0 partition_columns ds/hr partition_columns.types string:string - rawDataSize 0 - serialization.ddl struct test2_n8 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns a,b @@ -1124,10 +1030,8 @@ STAGE PLANS: name default.test2_n8 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct test2_n8 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2_n8 name: default.test2_n8 @@ -1139,30 +1043,20 @@ STAGE PLANS: ds 1 hr 2 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"a":"true","b":"true"}} - bucket_count -1 column.name.delimiter , columns a,b - columns.comments columns.types int:double #### A masked pattern was here #### name default.test2_n8 - numFiles 0 - numRows 0 partition_columns ds/hr partition_columns.types string:string - rawDataSize 0 - serialization.ddl struct test2_n8 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns a,b @@ -1172,10 +1066,8 @@ STAGE PLANS: name default.test2_n8 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct test2_n8 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2_n8 name: default.test2_n8 @@ -1187,30 +1079,20 @@ STAGE PLANS: ds 1 hr 3 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"a":"true","b":"true"}} - bucket_count -1 column.name.delimiter , columns a,b - columns.comments columns.types int:double #### A masked pattern was here #### name default.test2_n8 - numFiles 0 - numRows 0 partition_columns ds/hr partition_columns.types string:string - rawDataSize 0 - serialization.ddl struct test2_n8 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe - totalSize 0 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns a,b @@ -1220,10 +1102,8 @@ STAGE PLANS: name default.test2_n8 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct test2_n8 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2_n8 name: default.test2_n8 @@ -1357,30 +1237,20 @@ STAGE PLANS: ds 1 hr 1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"a":"true","b":"true"}} - bucket_count -1 column.name.delimiter , columns a,b - columns.comments columns.types int:double #### A masked pattern was here #### name default.test2_n8 - numFiles 0 - numRows 0 partition_columns ds/hr partition_columns.types string:string - rawDataSize 0 - serialization.ddl struct test2_n8 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns a,b @@ -1390,10 +1260,8 @@ STAGE PLANS: name default.test2_n8 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct test2_n8 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2_n8 name: default.test2_n8 @@ -1406,30 +1274,20 @@ STAGE PLANS: ds 1 hr 2 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"a":"true","b":"true"}} - bucket_count -1 column.name.delimiter , columns a,b - columns.comments columns.types int:double #### A masked pattern was here #### name default.test2_n8 - numFiles 0 - numRows 0 partition_columns ds/hr partition_columns.types string:string - rawDataSize 0 - serialization.ddl struct test2_n8 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns a,b @@ -1439,10 +1297,8 @@ STAGE PLANS: name default.test2_n8 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct test2_n8 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2_n8 name: default.test2_n8 @@ -1455,30 +1311,20 @@ STAGE PLANS: ds 1 hr 3 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"a":"true","b":"true"}} - bucket_count -1 column.name.delimiter , columns a,b - columns.comments columns.types int:double #### A masked pattern was here #### name default.test2_n8 - numFiles 0 - 
numRows 0 partition_columns ds/hr partition_columns.types string:string - rawDataSize 0 - serialization.ddl struct test2_n8 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns a,b @@ -1488,10 +1334,8 @@ STAGE PLANS: name default.test2_n8 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct test2_n8 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2_n8 name: default.test2_n8 @@ -1618,30 +1462,20 @@ STAGE PLANS: partition values: ds 1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"a":"true","b":"true"}} - bucket_count -1 column.name.delimiter , columns a,b - columns.comments columns.types int:double #### A masked pattern was here #### name default.test1_n12 - numFiles 0 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct test1_n12 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns a,b @@ -1651,10 +1485,8 @@ STAGE PLANS: name default.test1_n12 partition_columns ds partition_columns.types string - serialization.ddl struct test1_n12 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test1_n12 name: default.test1_n12 @@ -1665,30 +1497,20 @@ STAGE PLANS: partition values: ds 2 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"a":"true","b":"true"}} - bucket_count -1 column.name.delimiter , columns a,b - columns.comments columns.types int:double #### A masked pattern was here #### name default.test1_n12 - numFiles 0 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct test1_n12 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns a,b @@ -1698,10 +1520,8 @@ STAGE PLANS: name default.test1_n12 partition_columns ds partition_columns.types string - serialization.ddl struct test1_n12 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test1_n12 name: default.test1_n12 @@ -1887,30 +1707,20 @@ STAGE PLANS: ds 
01:10:10 hr 01 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"a":"true","b":"true"}} - bucket_count -1 column.name.delimiter , columns a,b - columns.comments columns.types int:double #### A masked pattern was here #### name default.test2_n8 - numFiles 0 - numRows 0 partition_columns ds/hr partition_columns.types string:string - rawDataSize 0 - serialization.ddl struct test2_n8 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns a,b @@ -1920,10 +1730,8 @@ STAGE PLANS: name default.test2_n8 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct test2_n8 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2_n8 name: default.test2_n8 @@ -1935,30 +1743,20 @@ STAGE PLANS: ds 01:10:20 hr 02 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"a":"true","b":"true"}} - bucket_count -1 column.name.delimiter , columns a,b - columns.comments columns.types int:double #### A masked pattern was here #### name default.test2_n8 - numFiles 0 - numRows 0 partition_columns ds/hr partition_columns.types string:string - rawDataSize 0 - serialization.ddl struct test2_n8 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns a,b @@ -1968,10 +1766,8 @@ STAGE PLANS: name default.test2_n8 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct test2_n8 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2_n8 name: default.test2_n8 @@ -1983,30 +1779,20 @@ STAGE PLANS: ds 1 hr 1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"a":"true","b":"true"}} - bucket_count -1 column.name.delimiter , columns a,b - columns.comments columns.types int:double #### A masked pattern was here #### name default.test2_n8 - numFiles 0 - numRows 0 partition_columns ds/hr partition_columns.types string:string - rawDataSize 0 - serialization.ddl struct test2_n8 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns a,b @@ -2016,10 +1802,8 @@ STAGE PLANS: name default.test2_n8 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct test2_n8 { i32 a, 
double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2_n8 name: default.test2_n8 @@ -2031,30 +1815,20 @@ STAGE PLANS: ds 1 hr 2 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"a":"true","b":"true"}} - bucket_count -1 column.name.delimiter , columns a,b - columns.comments columns.types int:double #### A masked pattern was here #### name default.test2_n8 - numFiles 0 - numRows 0 partition_columns ds/hr partition_columns.types string:string - rawDataSize 0 - serialization.ddl struct test2_n8 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns a,b @@ -2064,10 +1838,8 @@ STAGE PLANS: name default.test2_n8 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct test2_n8 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2_n8 name: default.test2_n8 @@ -2079,30 +1851,20 @@ STAGE PLANS: ds 1 hr 3 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"a":"true","b":"true"}} - bucket_count -1 column.name.delimiter , columns a,b - columns.comments columns.types int:double #### A masked pattern was here #### name default.test2_n8 - numFiles 0 - numRows 0 partition_columns ds/hr partition_columns.types string:string - rawDataSize 0 - serialization.ddl struct test2_n8 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns a,b @@ -2112,10 +1874,8 @@ STAGE PLANS: name default.test2_n8 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct test2_n8 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2_n8 name: default.test2_n8 diff --git a/ql/src/test/results/clientpositive/llap/murmur_hash_migration.q.out b/ql/src/test/results/clientpositive/llap/murmur_hash_migration.q.out index 10de2898ac..54600bc17f 100644 --- a/ql/src/test/results/clientpositive/llap/murmur_hash_migration.q.out +++ b/ql/src/test/results/clientpositive/llap/murmur_hash_migration.q.out @@ -208,25 +208,17 @@ STAGE PLANS: partition values: ds 2008-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 4 bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_n20 - numFiles 4 - numRows 150 partition_columns ds 
partition_columns.types string - rawDataSize 1602 - serialization.ddl struct srcbucket_mapjoin_part_n20 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 1752 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -243,10 +235,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_n20 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_n20 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_n20 name: default.srcbucket_mapjoin_part_n20 @@ -284,10 +274,8 @@ STAGE PLANS: name default.tab_part_n11 partition_columns ds partition_columns.types string - serialization.ddl struct tab_part_n11 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tab_part_n11 TotalFiles: 1 @@ -319,10 +307,8 @@ STAGE PLANS: name default.tab_part_n11 partition_columns ds partition_columns.types string - serialization.ddl struct tab_part_n11 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tab_part_n11 @@ -417,25 +403,17 @@ STAGE PLANS: partition values: ds 2008-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_n18 - numFiles 2 - numRows 150 partition_columns ds partition_columns.types string - rawDataSize 1598 - serialization.ddl struct srcbucket_mapjoin_n18 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 1748 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -452,10 +430,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_n18 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_n18 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_n18 name: default.srcbucket_mapjoin_n18 @@ -493,10 +469,8 @@ STAGE PLANS: name default.tab_n10 partition_columns ds partition_columns.types string - serialization.ddl struct tab_n10 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tab_n10 TotalFiles: 1 @@ -528,10 +502,8 @@ STAGE PLANS: name default.tab_n10 partition_columns ds partition_columns.types string - serialization.ddl struct tab_n10 { i32 key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tab_n10 @@ -669,25 +641,17 @@ STAGE PLANS: partition values: ds 2008-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_n18 - numFiles 2 - numRows 150 partition_columns ds partition_columns.types string - rawDataSize 1598 - serialization.ddl struct srcbucket_mapjoin_n18 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 1748 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -704,10 +668,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_n18 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_n18 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_n18 name: default.srcbucket_mapjoin_n18 @@ -752,25 +714,17 @@ STAGE PLANS: partition values: ds 2008-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 4 bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_n20 - numFiles 4 - numRows 150 partition_columns ds partition_columns.types string - rawDataSize 1602 - serialization.ddl struct srcbucket_mapjoin_part_n20 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 1752 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -787,10 +741,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_n20 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_n20 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_n20 name: default.srcbucket_mapjoin_part_n20 @@ -963,25 +915,17 @@ STAGE PLANS: partition values: ds 2008-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 4 bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.tab_part_n11 - numFiles 4 - numRows 150 partition_columns ds partition_columns.types string - rawDataSize 1602 - serialization.ddl struct tab_part_n11 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 1752 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -998,10 +942,8 @@ STAGE PLANS: name default.tab_part_n11 partition_columns 
ds partition_columns.types string - serialization.ddl struct tab_part_n11 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tab_part_n11 name: default.tab_part_n11 @@ -1046,25 +988,17 @@ STAGE PLANS: partition values: ds 2008-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.tab_n10 - numFiles 2 - numRows 150 partition_columns ds partition_columns.types string - rawDataSize 1598 - serialization.ddl struct tab_n10 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 1748 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1081,10 +1015,8 @@ STAGE PLANS: name default.tab_n10 partition_columns ds partition_columns.types string - serialization.ddl struct tab_n10 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tab_n10 name: default.tab_n10 diff --git a/ql/src/test/results/clientpositive/llap/murmur_hash_migration2.q.out b/ql/src/test/results/clientpositive/llap/murmur_hash_migration2.q.out index 29f50d8556..cc74705cc5 100644 --- a/ql/src/test/results/clientpositive/llap/murmur_hash_migration2.q.out +++ b/ql/src/test/results/clientpositive/llap/murmur_hash_migration2.q.out @@ -75,11 +75,9 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns - columns.comments columns.types #### A masked pattern was here #### name _dummy_database._dummy_table - serialization.ddl struct _dummy_table { } serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe serde: org.apache.hadoop.hive.serde2.NullStructSerDe @@ -87,7 +85,6 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.NullRowsInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns @@ -95,7 +92,6 @@ STAGE PLANS: columns.types #### A masked pattern was here #### name _dummy_database._dummy_table - serialization.ddl struct _dummy_table { } serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe serde: org.apache.hadoop.hive.serde2.NullStructSerDe @@ -134,12 +130,10 @@ STAGE PLANS: name default.acid_ptn_bucket1 partition_columns ds partition_columns.types string - serialization.ddl struct acid_ptn_bucket1 { i32 a, i32 b} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde transactional true transactional_properties default -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.acid_ptn_bucket1 TotalFiles: 1 @@ -172,12 +166,10 @@ STAGE PLANS: name default.acid_ptn_bucket1 partition_columns ds partition_columns.types string - serialization.ddl struct acid_ptn_bucket1 { i32 a, i32 b} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde transactional true transactional_properties default -#### A masked pattern was here 
#### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.acid_ptn_bucket1 Write Type: INSERT diff --git a/ql/src/test/results/clientpositive/llap/offset_limit_global_optimizer.q.out b/ql/src/test/results/clientpositive/llap/offset_limit_global_optimizer.q.out index bf5c28670d..7e14fc7988 100644 --- a/ql/src/test/results/clientpositive/llap/offset_limit_global_optimizer.q.out +++ b/ql/src/test/results/clientpositive/llap/offset_limit_global_optimizer.q.out @@ -68,30 +68,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -101,10 +91,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -117,30 +105,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -150,10 +128,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -166,30 +142,20 @@ STAGE PLANS: ds 2008-04-09 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 
partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -199,10 +165,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -215,30 +179,20 @@ STAGE PLANS: ds 2008-04-09 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -248,10 +202,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -399,30 +351,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -432,10 +374,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A 
masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -448,30 +388,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -481,10 +411,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -497,30 +425,20 @@ STAGE PLANS: ds 2008-04-09 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -530,10 +448,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -546,30 +462,20 @@ STAGE PLANS: ds 2008-04-09 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input 
format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -579,10 +485,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -730,30 +634,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -763,10 +657,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -779,30 +671,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -812,10 +694,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -828,30 +708,20 @@ STAGE PLANS: ds 2008-04-09 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - 
columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -861,10 +731,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -877,30 +745,20 @@ STAGE PLANS: ds 2008-04-09 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -910,10 +768,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -1071,30 +927,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1104,10 +950,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - 
serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -1120,30 +964,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1153,10 +987,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -1169,30 +1001,20 @@ STAGE PLANS: ds 2008-04-09 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1202,10 +1024,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -1218,30 +1038,20 @@ STAGE PLANS: ds 2008-04-09 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1251,10 +1061,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -1987,30 +1795,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2020,10 +1818,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -2036,30 +1832,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2069,10 +1855,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -2085,30 +1869,20 @@ STAGE PLANS: ds 
2008-04-09 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2118,10 +1892,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -2134,30 +1906,20 @@ STAGE PLANS: ds 2008-04-09 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2167,10 +1929,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -2313,30 +2073,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 
bucketing_version 2 column.name.delimiter , columns key,value @@ -2346,10 +2096,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -2362,30 +2110,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2395,10 +2133,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -2411,30 +2147,20 @@ STAGE PLANS: ds 2008-04-09 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2444,10 +2170,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -2460,30 +2184,20 @@ STAGE PLANS: ds 2008-04-09 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 
partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2493,10 +2207,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -2639,30 +2351,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2672,10 +2374,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -2688,30 +2388,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2721,10 +2411,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe 
-#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -2737,30 +2425,20 @@ STAGE PLANS: ds 2008-04-09 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2770,10 +2448,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -2786,30 +2462,20 @@ STAGE PLANS: ds 2008-04-09 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2819,10 +2485,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -2975,30 +2639,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -3008,10 +2662,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -3024,30 +2676,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -3057,10 +2699,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -3073,30 +2713,20 @@ STAGE PLANS: ds 2008-04-09 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -3106,10 +2736,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -3122,30 +2750,20 @@ STAGE PLANS: ds 2008-04-09 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} 
- bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -3155,10 +2773,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart diff --git a/ql/src/test/results/clientpositive/llap/optimize_nullscan.q.out b/ql/src/test/results/clientpositive/llap/optimize_nullscan.q.out index 77ddaa273e..b1a46e995a 100644 --- a/ql/src/test/results/clientpositive/llap/optimize_nullscan.q.out +++ b/ql/src/test/results/clientpositive/llap/optimize_nullscan.q.out @@ -206,30 +206,20 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -237,14 +227,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -289,30 +273,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string 
key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -322,10 +296,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -337,30 +309,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -370,10 +332,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -385,30 +345,20 @@ STAGE PLANS: ds 2008-04-09 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -418,10 +368,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -433,30 +381,20 @@ STAGE 
PLANS: ds 2008-04-09 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -466,10 +404,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -608,30 +544,20 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -639,14 +565,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -691,30 +611,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -724,10 +634,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -740,30 +648,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -773,10 +671,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -789,30 +685,20 @@ STAGE PLANS: ds 2008-04-09 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -822,10 +708,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -838,30 +722,20 @@ STAGE PLANS: ds 2008-04-09 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - 
bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -871,10 +745,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -1044,30 +916,20 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1075,14 +937,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -1126,30 +982,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1159,10 +1005,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -1174,30 +1018,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1207,10 +1041,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -1222,30 +1054,20 @@ STAGE PLANS: ds 2008-04-09 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1255,10 +1077,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -1270,30 +1090,20 @@ STAGE PLANS: ds 2008-04-09 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked 
pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1303,10 +1113,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -1452,30 +1260,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1483,14 +1281,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -1526,30 +1318,20 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1557,14 +1339,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -1701,30 +1477,20 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1732,14 +1498,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -1776,30 +1536,20 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1807,14 +1557,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -1933,30 +1677,20 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1964,14 +1698,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src diff --git a/ql/src/test/results/clientpositive/llap/outer_join_ppr.q.out b/ql/src/test/results/clientpositive/llap/outer_join_ppr.q.out index d7bc092156..13bfa0fb2d 100644 --- a/ql/src/test/results/clientpositive/llap/outer_join_ppr.q.out +++ b/ql/src/test/results/clientpositive/llap/outer_join_ppr.q.out @@ -82,30 +82,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -113,14 +103,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -166,30 +150,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -199,10 +173,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -215,30 +187,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -248,10 +210,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -424,30 +384,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -455,14 +405,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -508,30 +452,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -541,10 +475,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -557,30 +489,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -590,10 +512,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart diff --git 
a/ql/src/test/results/clientpositive/llap/parquet_vectorization_0.q.out b/ql/src/test/results/clientpositive/llap/parquet_vectorization_0.q.out index 8b5e892d9a..93ecd40c36 100644 --- a/ql/src/test/results/clientpositive/llap/parquet_vectorization_0.q.out +++ b/ql/src/test/results/clientpositive/llap/parquet_vectorization_0.q.out @@ -1263,30 +1263,20 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat output format: org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cbigint":"true","cboolean1":"true","cboolean2":"true","cdouble":"true","cfloat":"true","cint":"true","csmallint":"true","cstring1":"true","cstring2":"true","ctimestamp1":"true","ctimestamp2":"true","ctinyint":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2 - columns.comments columns.types tinyint:smallint:int:bigint:float:double:string:string:timestamp:timestamp:boolean:boolean #### A masked pattern was here #### name default.alltypesparquet - numFiles 1 - numRows 12288 - rawDataSize __SOME_NUMBER__ - serialization.ddl struct alltypesparquet { byte ctinyint, i16 csmallint, i32 cint, i64 cbigint, float cfloat, double cdouble, string cstring1, string cstring2, timestamp ctimestamp1, timestamp ctimestamp2, bool cboolean1, bool cboolean2} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe - totalSize __SOME_NUMBER__ -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe input format: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat output format: org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cbigint":"true","cboolean1":"true","cboolean2":"true","cdouble":"true","cfloat":"true","cint":"true","csmallint":"true","cstring1":"true","cstring2":"true","ctimestamp1":"true","ctimestamp2":"true","ctinyint":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2 @@ -1294,14 +1284,8 @@ STAGE PLANS: columns.types tinyint:smallint:int:bigint:float:double:string:string:timestamp:timestamp:boolean:boolean #### A masked pattern was here #### name default.alltypesparquet - numFiles 1 - numRows 12288 - rawDataSize __SOME_NUMBER__ - serialization.ddl struct alltypesparquet { byte ctinyint, i16 csmallint, i32 cint, i64 cbigint, float cfloat, double cdouble, string cstring1, string cstring2, timestamp ctimestamp1, timestamp ctimestamp2, bool cboolean1, bool cboolean2} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe - totalSize __SOME_NUMBER__ -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe name: default.alltypesparquet name: default.alltypesparquet @@ -30098,30 +30082,20 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat output format: org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat properties: - COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"cbigint":"true","cboolean1":"true","cboolean2":"true","cdouble":"true","cfloat":"true","cint":"true","csmallint":"true","cstring1":"true","cstring2":"true","ctimestamp1":"true","ctimestamp2":"true","ctinyint":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2 - columns.comments columns.types tinyint:smallint:int:bigint:float:double:string:string:timestamp:timestamp:boolean:boolean #### A masked pattern was here #### name default.alltypesparquet - numFiles 1 - numRows 12288 - rawDataSize __SOME_NUMBER__ - serialization.ddl struct alltypesparquet { byte ctinyint, i16 csmallint, i32 cint, i64 cbigint, float cfloat, double cdouble, string cstring1, string cstring2, timestamp ctimestamp1, timestamp ctimestamp2, bool cboolean1, bool cboolean2} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe - totalSize __SOME_NUMBER__ -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe input format: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat output format: org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cbigint":"true","cboolean1":"true","cboolean2":"true","cdouble":"true","cfloat":"true","cint":"true","csmallint":"true","cstring1":"true","cstring2":"true","ctimestamp1":"true","ctimestamp2":"true","ctinyint":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2 @@ -30129,14 +30103,8 @@ STAGE PLANS: columns.types tinyint:smallint:int:bigint:float:double:string:string:timestamp:timestamp:boolean:boolean #### A masked pattern was here #### name default.alltypesparquet - numFiles 1 - numRows 12288 - rawDataSize __SOME_NUMBER__ - serialization.ddl struct alltypesparquet { byte ctinyint, i16 csmallint, i32 cint, i64 cbigint, float cfloat, double cdouble, string cstring1, string cstring2, timestamp ctimestamp1, timestamp ctimestamp2, bool cboolean1, bool cboolean2} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe - totalSize __SOME_NUMBER__ -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe name: default.alltypesparquet name: default.alltypesparquet @@ -30225,30 +30193,20 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat output format: org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cbigint":"true","cboolean1":"true","cboolean2":"true","cdouble":"true","cfloat":"true","cint":"true","csmallint":"true","cstring1":"true","cstring2":"true","ctimestamp1":"true","ctimestamp2":"true","ctinyint":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2 - columns.comments columns.types tinyint:smallint:int:bigint:float:double:string:string:timestamp:timestamp:boolean:boolean #### A masked pattern was here #### name default.alltypesparquet - numFiles 1 - numRows 12288 - rawDataSize __SOME_NUMBER__ - serialization.ddl struct alltypesparquet { byte ctinyint, 
i16 csmallint, i32 cint, i64 cbigint, float cfloat, double cdouble, string cstring1, string cstring2, timestamp ctimestamp1, timestamp ctimestamp2, bool cboolean1, bool cboolean2} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe - totalSize __SOME_NUMBER__ -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe input format: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat output format: org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cbigint":"true","cboolean1":"true","cboolean2":"true","cdouble":"true","cfloat":"true","cint":"true","csmallint":"true","cstring1":"true","cstring2":"true","ctimestamp1":"true","ctimestamp2":"true","ctinyint":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2 @@ -30256,14 +30214,8 @@ STAGE PLANS: columns.types tinyint:smallint:int:bigint:float:double:string:string:timestamp:timestamp:boolean:boolean #### A masked pattern was here #### name default.alltypesparquet - numFiles 1 - numRows 12288 - rawDataSize __SOME_NUMBER__ - serialization.ddl struct alltypesparquet { byte ctinyint, i16 csmallint, i32 cint, i64 cbigint, float cfloat, double cdouble, string cstring1, string cstring2, timestamp ctimestamp1, timestamp ctimestamp2, bool cboolean1, bool cboolean2} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe - totalSize __SOME_NUMBER__ -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe name: default.alltypesparquet name: default.alltypesparquet @@ -30352,30 +30304,20 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat output format: org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cbigint":"true","cboolean1":"true","cboolean2":"true","cdouble":"true","cfloat":"true","cint":"true","csmallint":"true","cstring1":"true","cstring2":"true","ctimestamp1":"true","ctimestamp2":"true","ctinyint":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2 - columns.comments columns.types tinyint:smallint:int:bigint:float:double:string:string:timestamp:timestamp:boolean:boolean #### A masked pattern was here #### name default.alltypesparquet - numFiles 1 - numRows 12288 - rawDataSize __SOME_NUMBER__ - serialization.ddl struct alltypesparquet { byte ctinyint, i16 csmallint, i32 cint, i64 cbigint, float cfloat, double cdouble, string cstring1, string cstring2, timestamp ctimestamp1, timestamp ctimestamp2, bool cboolean1, bool cboolean2} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe - totalSize __SOME_NUMBER__ -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe input format: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat output format: org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat properties: - COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"cbigint":"true","cboolean1":"true","cboolean2":"true","cdouble":"true","cfloat":"true","cint":"true","csmallint":"true","cstring1":"true","cstring2":"true","ctimestamp1":"true","ctimestamp2":"true","ctinyint":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2 @@ -30383,14 +30325,8 @@ STAGE PLANS: columns.types tinyint:smallint:int:bigint:float:double:string:string:timestamp:timestamp:boolean:boolean #### A masked pattern was here #### name default.alltypesparquet - numFiles 1 - numRows 12288 - rawDataSize __SOME_NUMBER__ - serialization.ddl struct alltypesparquet { byte ctinyint, i16 csmallint, i32 cint, i64 cbigint, float cfloat, double cdouble, string cstring1, string cstring2, timestamp ctimestamp1, timestamp ctimestamp2, bool cboolean1, bool cboolean2} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe - totalSize __SOME_NUMBER__ -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe name: default.alltypesparquet name: default.alltypesparquet @@ -30473,30 +30409,20 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat output format: org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cbigint":"true","cboolean1":"true","cboolean2":"true","cdouble":"true","cfloat":"true","cint":"true","csmallint":"true","cstring1":"true","cstring2":"true","ctimestamp1":"true","ctimestamp2":"true","ctinyint":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2 - columns.comments columns.types tinyint:smallint:int:bigint:float:double:string:string:timestamp:timestamp:boolean:boolean #### A masked pattern was here #### name default.alltypesparquet - numFiles 1 - numRows 12288 - rawDataSize __SOME_NUMBER__ - serialization.ddl struct alltypesparquet { byte ctinyint, i16 csmallint, i32 cint, i64 cbigint, float cfloat, double cdouble, string cstring1, string cstring2, timestamp ctimestamp1, timestamp ctimestamp2, bool cboolean1, bool cboolean2} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe - totalSize __SOME_NUMBER__ -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe input format: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat output format: org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cbigint":"true","cboolean1":"true","cboolean2":"true","cdouble":"true","cfloat":"true","cint":"true","csmallint":"true","cstring1":"true","cstring2":"true","ctimestamp1":"true","ctimestamp2":"true","ctinyint":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2 @@ -30504,14 +30430,8 @@ STAGE PLANS: columns.types tinyint:smallint:int:bigint:float:double:string:string:timestamp:timestamp:boolean:boolean #### A masked pattern was here #### name default.alltypesparquet - numFiles 1 - numRows 12288 - rawDataSize __SOME_NUMBER__ - serialization.ddl struct 
alltypesparquet { byte ctinyint, i16 csmallint, i32 cint, i64 cbigint, float cfloat, double cdouble, string cstring1, string cstring2, timestamp ctimestamp1, timestamp ctimestamp2, bool cboolean1, bool cboolean2} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe - totalSize __SOME_NUMBER__ -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe name: default.alltypesparquet name: default.alltypesparquet diff --git a/ql/src/test/results/clientpositive/llap/pcr.q.out b/ql/src/test/results/clientpositive/llap/pcr.q.out index 755acde898..d10364c727 100644 --- a/ql/src/test/results/clientpositive/llap/pcr.q.out +++ b/ql/src/test/results/clientpositive/llap/pcr.q.out @@ -114,30 +114,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -147,10 +137,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -162,30 +150,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -195,10 +173,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -334,30 +310,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - 
bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -367,10 +333,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -382,30 +346,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -415,10 +369,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -430,30 +382,20 @@ STAGE PLANS: partition values: ds 2000-04-10 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -463,10 +405,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -638,30 +578,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -671,10 +601,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -686,30 +614,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -719,10 +637,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -858,30 +774,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -891,10 +797,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -906,30 +810,20 @@ STAGE PLANS: partition values: ds 2000-04-10 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -939,10 +833,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -1082,30 +974,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1115,10 +997,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -1130,30 +1010,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds 
partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1163,10 +1033,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -1178,30 +1046,20 @@ STAGE PLANS: partition values: ds 2000-04-10 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1211,10 +1069,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -1365,30 +1221,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1398,10 +1244,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ 
-1413,30 +1257,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1446,10 +1280,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -1461,30 +1293,20 @@ STAGE PLANS: partition values: ds 2000-04-10 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1494,10 +1316,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -1654,30 +1474,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1687,10 +1497,8 @@ STAGE 
PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -1702,30 +1510,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1735,10 +1533,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -1857,30 +1653,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1890,10 +1676,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -1905,30 +1689,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1938,10 +1712,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -2102,30 +1874,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2135,10 +1897,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -2150,30 +1910,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2183,10 +1933,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -2198,30 +1946,20 @@ STAGE PLANS: partition values: ds 2000-04-10 properties: - COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2231,10 +1969,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -2420,30 +2156,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2453,10 +2179,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -2468,30 +2192,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2501,10 +2215,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl 
struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -2634,30 +2346,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2667,10 +2369,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -2715,30 +2415,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2748,10 +2438,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -2940,30 +2628,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2973,10 +2651,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -3021,30 +2697,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -3054,10 +2720,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -3254,30 +2918,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -3287,10 +2941,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -3302,30 +2954,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types 
int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -3335,10 +2977,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -3350,30 +2990,20 @@ STAGE PLANS: partition values: ds 2000-04-10 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -3383,10 +3013,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -3398,30 +3026,20 @@ STAGE PLANS: partition values: ds 2000-04-11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -3431,10 +3049,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here 
#### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -3607,30 +3223,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -3640,10 +3246,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -3655,30 +3259,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -3688,10 +3282,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -3703,30 +3295,20 @@ STAGE PLANS: partition values: ds 2000-04-10 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - 
bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -3736,10 +3318,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -3914,8 +3494,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -3923,14 +3501,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.pcr_t2 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct pcr_t2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t2 TotalFiles: 1 @@ -3971,8 +3543,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -3980,14 +3550,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.pcr_t3 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct pcr_t3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t3 TotalFiles: 1 @@ -4025,30 +3589,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -4058,10 +3612,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -4146,8 +3698,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat 
properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -4155,14 +3705,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.pcr_t2 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct pcr_t2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t2 @@ -4185,8 +3729,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -4194,14 +3736,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.pcr_t3 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct pcr_t3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t3 @@ -4296,8 +3832,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -4305,14 +3839,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.pcr_t2 - numFiles 1 - numRows 20 - rawDataSize 160 - serialization.ddl struct pcr_t2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t2 TotalFiles: 1 @@ -4357,8 +3885,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -4366,14 +3892,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.pcr_t3 - numFiles 1 - numRows 20 - rawDataSize 160 - serialization.ddl struct pcr_t3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t3 TotalFiles: 1 @@ -4411,30 +3931,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -4444,10 +3954,8 @@ STAGE PLANS: name default.pcr_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 @@ -4532,8 +4040,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -4541,14 +4047,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.pcr_t2 - numFiles 1 - numRows 20 - rawDataSize 160 - serialization.ddl struct pcr_t2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t2 @@ -4571,8 +4071,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -4580,14 +4078,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.pcr_t3 - numFiles 1 - numRows 20 - rawDataSize 160 - serialization.ddl struct pcr_t3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t3 @@ -4691,30 +4183,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -4724,10 +4206,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -4851,30 +4331,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -4884,10 +4354,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -4900,30 +4368,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -4933,10 +4391,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -5064,30 +4520,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here 
#### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -5097,10 +4543,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -5113,30 +4557,20 @@ STAGE PLANS: ds 2008-04-09 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -5146,10 +4580,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart diff --git a/ql/src/test/results/clientpositive/llap/pcs.q.out b/ql/src/test/results/clientpositive/llap/pcs.q.out index 9a1d5a5362..c20618e28e 100644 --- a/ql/src/test/results/clientpositive/llap/pcs.q.out +++ b/ql/src/test/results/clientpositive/llap/pcs.q.out @@ -151,30 +151,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcs_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -184,10 +174,8 @@ STAGE PLANS: name default.pcs_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcs_t1 name: default.pcs_t1 @@ -199,30 +187,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcs_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -232,10 +210,8 @@ STAGE PLANS: name default.pcs_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcs_t1 name: default.pcs_t1 @@ -323,30 +299,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcs_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -356,10 +322,8 @@ STAGE PLANS: name default.pcs_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcs_t1 name: default.pcs_t1 @@ -369,30 +333,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcs_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 
bucketing_version 2 column.name.delimiter , columns key,value @@ -402,10 +356,8 @@ STAGE PLANS: name default.pcs_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcs_t1 name: default.pcs_t1 @@ -464,30 +416,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcs_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -497,10 +439,8 @@ STAGE PLANS: name default.pcs_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcs_t1 name: default.pcs_t1 @@ -510,30 +450,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcs_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -543,10 +473,8 @@ STAGE PLANS: name default.pcs_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcs_t1 name: default.pcs_t1 @@ -655,30 +583,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcs_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcs_t1 { i32 key, string value} 
serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -688,10 +606,8 @@ STAGE PLANS: name default.pcs_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcs_t1 name: default.pcs_t1 @@ -703,30 +619,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcs_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -736,10 +642,8 @@ STAGE PLANS: name default.pcs_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcs_t1 name: default.pcs_t1 @@ -781,30 +685,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcs_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -814,10 +708,8 @@ STAGE PLANS: name default.pcs_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcs_t1 name: default.pcs_t1 @@ -829,30 +721,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcs_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -862,10 +744,8 @@ STAGE PLANS: name default.pcs_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcs_t1 name: default.pcs_t1 @@ -962,30 +842,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcs_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -995,10 +865,8 @@ STAGE PLANS: name default.pcs_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcs_t1 name: default.pcs_t1 @@ -1008,30 +876,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcs_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1041,10 +899,8 @@ STAGE PLANS: name default.pcs_t1 partition_columns ds partition_columns.types string - serialization.ddl struct 
pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcs_t1 name: default.pcs_t1 @@ -1353,30 +1209,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcs_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1386,10 +1232,8 @@ STAGE PLANS: name default.pcs_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcs_t1 name: default.pcs_t1 @@ -1462,30 +1306,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcs_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1495,10 +1329,8 @@ STAGE PLANS: name default.pcs_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcs_t1 name: default.pcs_t1 @@ -1508,30 +1340,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcs_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1541,10 +1363,8 @@ STAGE PLANS: name default.pcs_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcs_t1 name: default.pcs_t1 @@ -1591,30 +1411,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcs_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1624,10 +1434,8 @@ STAGE PLANS: name default.pcs_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcs_t1 name: default.pcs_t1 @@ -1637,30 +1445,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcs_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1670,10 +1468,8 @@ STAGE PLANS: name default.pcs_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcs_t1 name: default.pcs_t1 @@ -1683,30 +1479,20 @@ STAGE PLANS: partition values: ds 2000-04-10 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types 
int:string #### A masked pattern was here #### name default.pcs_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1716,10 +1502,8 @@ STAGE PLANS: name default.pcs_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcs_t1 name: default.pcs_t1 @@ -1784,30 +1568,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcs_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1817,10 +1591,8 @@ STAGE PLANS: name default.pcs_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcs_t1 name: default.pcs_t1 @@ -1830,30 +1602,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcs_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1863,10 +1625,8 @@ STAGE PLANS: name default.pcs_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here 
#### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcs_t1 name: default.pcs_t1 @@ -1876,30 +1636,20 @@ STAGE PLANS: partition values: ds 2000-04-10 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcs_t1 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1909,10 +1659,8 @@ STAGE PLANS: name default.pcs_t1 partition_columns ds partition_columns.types string - serialization.ddl struct pcs_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcs_t1 name: default.pcs_t1 diff --git a/ql/src/test/results/clientpositive/llap/pointlookup2.q.out b/ql/src/test/results/clientpositive/llap/pointlookup2.q.out index 01fadb3c62..ef413831e3 100644 --- a/ql/src/test/results/clientpositive/llap/pointlookup2.q.out +++ b/ql/src/test/results/clientpositive/llap/pointlookup2.q.out @@ -159,30 +159,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n2 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -192,10 +182,8 @@ STAGE PLANS: name default.pcr_t1_n2 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n2 name: default.pcr_t1_n2 @@ -207,30 +195,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n2 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -240,10 +218,8 @@ STAGE PLANS: name default.pcr_t1_n2 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n2 name: default.pcr_t1_n2 @@ -368,30 +344,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n2 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -401,10 +367,8 @@ STAGE PLANS: name default.pcr_t1_n2 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n2 name: default.pcr_t1_n2 @@ -449,30 +413,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n2 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -482,10 +436,8 @@ STAGE PLANS: name default.pcr_t1_n2 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n2 name: default.pcr_t1_n2 @@ -634,30 +586,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n2 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -667,10 +609,8 @@ STAGE PLANS: name default.pcr_t1_n2 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n2 name: default.pcr_t1_n2 @@ -715,30 +655,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n2 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -748,10 +678,8 @@ STAGE PLANS: name default.pcr_t1_n2 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n2 name: default.pcr_t1_n2 @@ -896,30 +824,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n2 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -929,10 +847,8 @@ STAGE PLANS: name default.pcr_t1_n2 partition_columns ds 
partition_columns.types string - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n2 name: default.pcr_t1_n2 @@ -944,30 +860,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n2 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -977,10 +883,8 @@ STAGE PLANS: name default.pcr_t1_n2 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n2 name: default.pcr_t1_n2 @@ -1022,30 +926,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"ds":"true","key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns ds,key,value - columns.comments columns.types string:int:string #### A masked pattern was here #### name default.pcr_t2_n0 - numFiles 1 - numRows 1 - rawDataSize 18 - serialization.ddl struct pcr_t2_n0 { string ds, i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 19 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"ds":"true","key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns ds,key,value @@ -1053,14 +947,8 @@ STAGE PLANS: columns.types string:int:string #### A masked pattern was here #### name default.pcr_t2_n0 - numFiles 1 - numRows 1 - rawDataSize 18 - serialization.ddl struct pcr_t2_n0 { string ds, i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 19 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t2_n0 name: default.pcr_t2_n0 @@ -1216,30 +1104,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked 
pattern was here #### name default.pcr_t1_n2 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1249,10 +1127,8 @@ STAGE PLANS: name default.pcr_t1_n2 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n2 name: default.pcr_t1_n2 @@ -1264,30 +1140,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n2 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1297,10 +1163,8 @@ STAGE PLANS: name default.pcr_t1_n2 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n2 name: default.pcr_t1_n2 @@ -1312,30 +1176,20 @@ STAGE PLANS: partition values: ds 2000-04-10 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n2 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1345,10 +1199,8 @@ STAGE PLANS: name default.pcr_t1_n2 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A 
masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n2 name: default.pcr_t1_n2 @@ -1391,30 +1243,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"ds":"true","key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns ds,key,value - columns.comments columns.types string:int:string #### A masked pattern was here #### name default.pcr_t2_n0 - numFiles 1 - numRows 1 - rawDataSize 18 - serialization.ddl struct pcr_t2_n0 { string ds, i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 19 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"ds":"true","key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns ds,key,value @@ -1422,14 +1264,8 @@ STAGE PLANS: columns.types string:int:string #### A masked pattern was here #### name default.pcr_t2_n0 - numFiles 1 - numRows 1 - rawDataSize 18 - serialization.ddl struct pcr_t2_n0 { string ds, i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 19 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t2_n0 name: default.pcr_t2_n0 @@ -1753,30 +1589,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n2 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1786,10 +1612,8 @@ STAGE PLANS: name default.pcr_t1_n2 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n2 name: default.pcr_t1_n2 @@ -1801,30 +1625,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n2 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1_n2 { i32 key, 
string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1834,10 +1648,8 @@ STAGE PLANS: name default.pcr_t1_n2 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n2 name: default.pcr_t1_n2 @@ -1962,30 +1774,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n2 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1995,10 +1797,8 @@ STAGE PLANS: name default.pcr_t1_n2 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n2 name: default.pcr_t1_n2 @@ -2043,30 +1843,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n2 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2076,10 +1866,8 @@ STAGE PLANS: name default.pcr_t1_n2 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n2 name: default.pcr_t1_n2 @@ -2228,30 +2016,20 @@ STAGE PLANS: partition values: 
ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n2 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2261,10 +2039,8 @@ STAGE PLANS: name default.pcr_t1_n2 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n2 name: default.pcr_t1_n2 @@ -2309,30 +2085,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n2 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2342,10 +2108,8 @@ STAGE PLANS: name default.pcr_t1_n2 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n2 name: default.pcr_t1_n2 @@ -2482,30 +2246,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n2 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2515,10 +2269,8 @@ STAGE PLANS: 
name default.pcr_t1_n2 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n2 name: default.pcr_t1_n2 @@ -2530,30 +2282,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n2 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2563,10 +2305,8 @@ STAGE PLANS: name default.pcr_t1_n2 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n2 name: default.pcr_t1_n2 @@ -2608,30 +2348,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"ds":"true","key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns ds,key,value - columns.comments columns.types string:int:string #### A masked pattern was here #### name default.pcr_t2_n0 - numFiles 1 - numRows 1 - rawDataSize 18 - serialization.ddl struct pcr_t2_n0 { string ds, i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 19 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"ds":"true","key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns ds,key,value @@ -2639,14 +2369,8 @@ STAGE PLANS: columns.types string:int:string #### A masked pattern was here #### name default.pcr_t2_n0 - numFiles 1 - numRows 1 - rawDataSize 18 - serialization.ddl struct pcr_t2_n0 { string ds, i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 19 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t2_n0 name: default.pcr_t2_n0 @@ -2790,30 +2514,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - 
columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n2 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2823,10 +2537,8 @@ STAGE PLANS: name default.pcr_t1_n2 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n2 name: default.pcr_t1_n2 @@ -2838,30 +2550,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n2 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2871,10 +2573,8 @@ STAGE PLANS: name default.pcr_t1_n2 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n2 name: default.pcr_t1_n2 @@ -2886,30 +2586,20 @@ STAGE PLANS: partition values: ds 2000-04-10 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n2 - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2919,10 +2609,8 @@ STAGE PLANS: name default.pcr_t1_n2 partition_columns ds partition_columns.types string - serialization.ddl struct pcr_t1_n2 { i32 key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n2 name: default.pcr_t1_n2 @@ -2965,30 +2653,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"ds":"true","key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns ds,key,value - columns.comments columns.types string:int:string #### A masked pattern was here #### name default.pcr_t2_n0 - numFiles 1 - numRows 1 - rawDataSize 18 - serialization.ddl struct pcr_t2_n0 { string ds, i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 19 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"ds":"true","key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns ds,key,value @@ -2996,14 +2674,8 @@ STAGE PLANS: columns.types string:int:string #### A masked pattern was here #### name default.pcr_t2_n0 - numFiles 1 - numRows 1 - rawDataSize 18 - serialization.ddl struct pcr_t2_n0 { string ds, i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 19 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t2_n0 name: default.pcr_t2_n0 diff --git a/ql/src/test/results/clientpositive/llap/pointlookup3.q.out b/ql/src/test/results/clientpositive/llap/pointlookup3.q.out index d945be2023..7962d3391c 100644 --- a/ql/src/test/results/clientpositive/llap/pointlookup3.q.out +++ b/ql/src/test/results/clientpositive/llap/pointlookup3.q.out @@ -114,30 +114,20 @@ STAGE PLANS: ds1 2000-04-08 ds2 2001-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n1 - numFiles 1 - numRows 20 partition_columns ds1/ds2 partition_columns.types string:string - rawDataSize 160 - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -147,10 +137,8 @@ STAGE PLANS: name default.pcr_t1_n1 partition_columns ds1/ds2 partition_columns.types string:string - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n1 name: default.pcr_t1_n1 @@ -163,30 +151,20 @@ STAGE PLANS: ds1 2000-04-09 ds2 2001-04-09 properties: - COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n1 - numFiles 1 - numRows 20 partition_columns ds1/ds2 partition_columns.types string:string - rawDataSize 160 - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -196,10 +174,8 @@ STAGE PLANS: name default.pcr_t1_n1 partition_columns ds1/ds2 partition_columns.types string:string - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n1 name: default.pcr_t1_n1 @@ -318,30 +294,20 @@ STAGE PLANS: ds1 2000-04-08 ds2 2001-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n1 - numFiles 1 - numRows 20 partition_columns ds1/ds2 partition_columns.types string:string - rawDataSize 160 - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -351,10 +317,8 @@ STAGE PLANS: name default.pcr_t1_n1 partition_columns ds1/ds2 partition_columns.types string:string - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n1 name: default.pcr_t1_n1 @@ -479,30 +443,20 @@ STAGE PLANS: ds1 2000-04-08 ds2 2001-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n1 - numFiles 1 - numRows 20 partition_columns ds1/ds2 partition_columns.types string:string - rawDataSize 160 - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -512,10 +466,8 @@ STAGE PLANS: name 
default.pcr_t1_n1 partition_columns ds1/ds2 partition_columns.types string:string - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n1 name: default.pcr_t1_n1 @@ -561,30 +513,20 @@ STAGE PLANS: ds1 2000-04-08 ds2 2001-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n1 - numFiles 1 - numRows 20 partition_columns ds1/ds2 partition_columns.types string:string - rawDataSize 160 - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -594,10 +536,8 @@ STAGE PLANS: name default.pcr_t1_n1 partition_columns ds1/ds2 partition_columns.types string:string - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n1 name: default.pcr_t1_n1 @@ -747,30 +687,20 @@ STAGE PLANS: ds1 2000-04-08 ds2 2001-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n1 - numFiles 1 - numRows 20 partition_columns ds1/ds2 partition_columns.types string:string - rawDataSize 160 - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -780,10 +710,8 @@ STAGE PLANS: name default.pcr_t1_n1 partition_columns ds1/ds2 partition_columns.types string:string - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n1 name: default.pcr_t1_n1 @@ -829,30 +757,20 @@ STAGE PLANS: ds1 2000-04-09 ds2 2001-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n1 - numFiles 1 - numRows 20 partition_columns ds1/ds2 partition_columns.types string:string - rawDataSize 160 - serialization.ddl struct pcr_t1_n1 { i32 key, 
string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -862,10 +780,8 @@ STAGE PLANS: name default.pcr_t1_n1 partition_columns ds1/ds2 partition_columns.types string:string - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n1 name: default.pcr_t1_n1 @@ -1011,30 +927,20 @@ STAGE PLANS: ds1 2000-04-08 ds2 2001-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n1 - numFiles 1 - numRows 20 partition_columns ds1/ds2 partition_columns.types string:string - rawDataSize 160 - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1044,10 +950,8 @@ STAGE PLANS: name default.pcr_t1_n1 partition_columns ds1/ds2 partition_columns.types string:string - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n1 name: default.pcr_t1_n1 @@ -1060,30 +964,20 @@ STAGE PLANS: ds1 2000-04-09 ds2 2001-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n1 - numFiles 1 - numRows 20 partition_columns ds1/ds2 partition_columns.types string:string - rawDataSize 160 - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1093,10 +987,8 @@ STAGE PLANS: name default.pcr_t1_n1 partition_columns ds1/ds2 partition_columns.types string:string - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n1 name: default.pcr_t1_n1 @@ 
-1141,30 +1033,20 @@ STAGE PLANS: ds1 2000-04-08 ds2 2001-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n1 - numFiles 1 - numRows 20 partition_columns ds1/ds2 partition_columns.types string:string - rawDataSize 160 - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1174,10 +1056,8 @@ STAGE PLANS: name default.pcr_t1_n1 partition_columns ds1/ds2 partition_columns.types string:string - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n1 name: default.pcr_t1_n1 @@ -1190,30 +1070,20 @@ STAGE PLANS: ds1 2000-04-09 ds2 2001-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n1 - numFiles 1 - numRows 20 partition_columns ds1/ds2 partition_columns.types string:string - rawDataSize 160 - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1223,10 +1093,8 @@ STAGE PLANS: name default.pcr_t1_n1 partition_columns ds1/ds2 partition_columns.types string:string - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n1 name: default.pcr_t1_n1 @@ -1239,30 +1107,20 @@ STAGE PLANS: ds1 2000-04-10 ds2 2001-04-10 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n1 - numFiles 1 - numRows 20 partition_columns ds1/ds2 partition_columns.types string:string - rawDataSize 160 - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - 
bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1272,10 +1130,8 @@ STAGE PLANS: name default.pcr_t1_n1 partition_columns ds1/ds2 partition_columns.types string:string - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n1 name: default.pcr_t1_n1 @@ -1650,30 +1506,20 @@ STAGE PLANS: ds1 2000-04-08 ds2 2001-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n1 - numFiles 1 - numRows 20 partition_columns ds1/ds2 partition_columns.types string:string - rawDataSize 160 - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1683,10 +1529,8 @@ STAGE PLANS: name default.pcr_t1_n1 partition_columns ds1/ds2 partition_columns.types string:string - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n1 name: default.pcr_t1_n1 @@ -1699,30 +1543,20 @@ STAGE PLANS: ds1 2000-04-09 ds2 2001-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n1 - numFiles 1 - numRows 20 partition_columns ds1/ds2 partition_columns.types string:string - rawDataSize 160 - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1732,10 +1566,8 @@ STAGE PLANS: name default.pcr_t1_n1 partition_columns ds1/ds2 partition_columns.types string:string - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n1 name: default.pcr_t1_n1 @@ -1854,30 +1686,20 @@ STAGE PLANS: ds1 2000-04-08 ds2 2001-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n1 - numFiles 1 - numRows 20 
partition_columns ds1/ds2 partition_columns.types string:string - rawDataSize 160 - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1887,10 +1709,8 @@ STAGE PLANS: name default.pcr_t1_n1 partition_columns ds1/ds2 partition_columns.types string:string - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n1 name: default.pcr_t1_n1 @@ -2015,30 +1835,20 @@ STAGE PLANS: ds1 2000-04-08 ds2 2001-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n1 - numFiles 1 - numRows 20 partition_columns ds1/ds2 partition_columns.types string:string - rawDataSize 160 - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2048,10 +1858,8 @@ STAGE PLANS: name default.pcr_t1_n1 partition_columns ds1/ds2 partition_columns.types string:string - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n1 name: default.pcr_t1_n1 @@ -2097,30 +1905,20 @@ STAGE PLANS: ds1 2000-04-08 ds2 2001-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n1 - numFiles 1 - numRows 20 partition_columns ds1/ds2 partition_columns.types string:string - rawDataSize 160 - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2130,10 +1928,8 @@ STAGE PLANS: name default.pcr_t1_n1 partition_columns ds1/ds2 partition_columns.types string:string - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A 
masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n1 name: default.pcr_t1_n1 @@ -2283,30 +2079,20 @@ STAGE PLANS: ds1 2000-04-08 ds2 2001-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n1 - numFiles 1 - numRows 20 partition_columns ds1/ds2 partition_columns.types string:string - rawDataSize 160 - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2316,10 +2102,8 @@ STAGE PLANS: name default.pcr_t1_n1 partition_columns ds1/ds2 partition_columns.types string:string - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n1 name: default.pcr_t1_n1 @@ -2365,30 +2149,20 @@ STAGE PLANS: ds1 2000-04-09 ds2 2001-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n1 - numFiles 1 - numRows 20 partition_columns ds1/ds2 partition_columns.types string:string - rawDataSize 160 - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2398,10 +2172,8 @@ STAGE PLANS: name default.pcr_t1_n1 partition_columns ds1/ds2 partition_columns.types string:string - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n1 name: default.pcr_t1_n1 @@ -2539,30 +2311,20 @@ STAGE PLANS: ds1 2000-04-08 ds2 2001-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n1 - numFiles 1 - numRows 20 partition_columns ds1/ds2 partition_columns.types string:string - rawDataSize 160 - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: 
org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2572,10 +2334,8 @@ STAGE PLANS: name default.pcr_t1_n1 partition_columns ds1/ds2 partition_columns.types string:string - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n1 name: default.pcr_t1_n1 @@ -2588,30 +2348,20 @@ STAGE PLANS: ds1 2000-04-09 ds2 2001-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n1 - numFiles 1 - numRows 20 partition_columns ds1/ds2 partition_columns.types string:string - rawDataSize 160 - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2621,10 +2371,8 @@ STAGE PLANS: name default.pcr_t1_n1 partition_columns ds1/ds2 partition_columns.types string:string - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n1 name: default.pcr_t1_n1 @@ -2669,30 +2417,20 @@ STAGE PLANS: ds1 2000-04-08 ds2 2001-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n1 - numFiles 1 - numRows 20 partition_columns ds1/ds2 partition_columns.types string:string - rawDataSize 160 - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2702,10 +2440,8 @@ STAGE PLANS: name default.pcr_t1_n1 partition_columns ds1/ds2 partition_columns.types string:string - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n1 name: default.pcr_t1_n1 @@ -2718,30 +2454,20 @@ STAGE PLANS: ds1 2000-04-09 ds2 2001-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value 
- columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n1 - numFiles 1 - numRows 20 partition_columns ds1/ds2 partition_columns.types string:string - rawDataSize 160 - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2751,10 +2477,8 @@ STAGE PLANS: name default.pcr_t1_n1 partition_columns ds1/ds2 partition_columns.types string:string - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n1 name: default.pcr_t1_n1 @@ -2767,30 +2491,20 @@ STAGE PLANS: ds1 2000-04-10 ds2 2001-04-10 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n1 - numFiles 1 - numRows 20 partition_columns ds1/ds2 partition_columns.types string:string - rawDataSize 160 - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -2800,10 +2514,8 @@ STAGE PLANS: name default.pcr_t1_n1 partition_columns ds1/ds2 partition_columns.types string:string - serialization.ddl struct pcr_t1_n1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n1 name: default.pcr_t1_n1 diff --git a/ql/src/test/results/clientpositive/llap/pointlookup4.q.out b/ql/src/test/results/clientpositive/llap/pointlookup4.q.out index 3ca21d7460..0eb6a5be48 100644 --- a/ql/src/test/results/clientpositive/llap/pointlookup4.q.out +++ b/ql/src/test/results/clientpositive/llap/pointlookup4.q.out @@ -114,30 +114,20 @@ STAGE PLANS: ds1 2000-04-08 ds2 2001-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n0 - numFiles 1 - numRows 20 partition_columns ds1/ds2 partition_columns.types string:string - rawDataSize 160 - serialization.ddl struct pcr_t1_n0 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -147,10 +137,8 @@ STAGE PLANS: name default.pcr_t1_n0 partition_columns ds1/ds2 partition_columns.types string:string - serialization.ddl struct pcr_t1_n0 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n0 name: default.pcr_t1_n0 @@ -163,30 +151,20 @@ STAGE PLANS: ds1 2000-04-09 ds2 2001-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n0 - numFiles 1 - numRows 20 partition_columns ds1/ds2 partition_columns.types string:string - rawDataSize 160 - serialization.ddl struct pcr_t1_n0 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -196,10 +174,8 @@ STAGE PLANS: name default.pcr_t1_n0 partition_columns ds1/ds2 partition_columns.types string:string - serialization.ddl struct pcr_t1_n0 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n0 name: default.pcr_t1_n0 @@ -338,30 +314,20 @@ STAGE PLANS: ds1 2000-04-08 ds2 2001-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1_n0 - numFiles 1 - numRows 20 partition_columns ds1/ds2 partition_columns.types string:string - rawDataSize 160 - serialization.ddl struct pcr_t1_n0 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -371,10 +337,8 @@ STAGE PLANS: name default.pcr_t1_n0 partition_columns ds1/ds2 partition_columns.types string:string - serialization.ddl struct pcr_t1_n0 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n0 name: default.pcr_t1_n0 @@ -387,30 +351,20 @@ STAGE PLANS: ds1 2000-04-09 ds2 2001-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern 
was here #### name default.pcr_t1_n0 - numFiles 1 - numRows 20 partition_columns ds1/ds2 partition_columns.types string:string - rawDataSize 160 - serialization.ddl struct pcr_t1_n0 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -420,10 +374,8 @@ STAGE PLANS: name default.pcr_t1_n0 partition_columns ds1/ds2 partition_columns.types string:string - serialization.ddl struct pcr_t1_n0 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1_n0 name: default.pcr_t1_n0 diff --git a/ql/src/test/results/clientpositive/llap/ppd_join_filter.q.out b/ql/src/test/results/clientpositive/llap/ppd_join_filter.q.out index 5145494c27..52db4aaa1c 100644 --- a/ql/src/test/results/clientpositive/llap/ppd_join_filter.q.out +++ b/ql/src/test/results/clientpositive/llap/ppd_join_filter.q.out @@ -104,30 +104,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -135,14 +125,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -365,30 +349,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -396,14 +370,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -625,30 +593,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -656,14 +614,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -886,30 +838,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , 
columns key,value @@ -917,14 +859,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src diff --git a/ql/src/test/results/clientpositive/llap/ppd_union_view.q.out b/ql/src/test/results/clientpositive/llap/ppd_union_view.q.out index d16d28b64b..036474b446 100644 --- a/ql/src/test/results/clientpositive/llap/ppd_union_view.q.out +++ b/ql/src/test/results/clientpositive/llap/ppd_union_view.q.out @@ -261,30 +261,20 @@ STAGE PLANS: partition values: ds 2011-10-13 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"keymap":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns keymap,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_old - numFiles 1 - numRows 1 partition_columns ds partition_columns.types string - rawDataSize 14 - serialization.ddl struct t1_old { string keymap, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 15 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns keymap,value @@ -296,10 +286,8 @@ STAGE PLANS: name default.t1_old partition_columns ds partition_columns.types string - serialization.ddl struct t1_old { string keymap, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_old name: default.t1_old @@ -346,30 +334,20 @@ STAGE PLANS: partition values: ds 2011-10-13 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","keymap":"true"}} - bucket_count -1 column.name.delimiter , columns key,keymap - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_mapping - numFiles 1 - numRows 1 partition_columns ds partition_columns.types string - rawDataSize 12 - serialization.ddl struct t1_mapping { string key, string keymap} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 13 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,keymap @@ -381,10 +359,8 @@ STAGE PLANS: name default.t1_mapping partition_columns ds partition_columns.types string - serialization.ddl struct t1_mapping { string key, string keymap} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_mapping name: default.t1_mapping @@ -577,30 +553,20 @@ STAGE PLANS: partition values: ds 2011-10-15 properties: - COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.t1_new_n0 - numFiles 1 - numRows 1 partition_columns ds partition_columns.types string - rawDataSize 11 - serialization.ddl struct t1_new_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 12 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -612,10 +578,8 @@ STAGE PLANS: name default.t1_new_n0 partition_columns ds partition_columns.types string - serialization.ddl struct t1_new_n0 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_new_n0 name: default.t1_new_n0 diff --git a/ql/src/test/results/clientpositive/llap/ppd_vc.q.out b/ql/src/test/results/clientpositive/llap/ppd_vc.q.out index ebb3363172..8df5077aff 100644 --- a/ql/src/test/results/clientpositive/llap/ppd_vc.q.out +++ b/ql/src/test/results/clientpositive/llap/ppd_vc.q.out @@ -34,30 +34,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -67,10 +57,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -81,30 +69,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -114,10 +92,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -128,30 +104,20 @@ STAGE PLANS: ds 2008-04-09 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -161,10 +127,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -175,30 +139,20 @@ STAGE PLANS: ds 2008-04-09 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -208,10 +162,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -361,30 +313,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat 
properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -392,14 +334,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -445,30 +381,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -478,10 +404,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -494,30 +418,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -527,10 +441,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -543,30 +455,20 @@ STAGE PLANS: ds 2008-04-09 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -576,10 +478,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -592,30 +492,20 @@ STAGE PLANS: ds 2008-04-09 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -625,10 +515,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart diff --git a/ql/src/test/results/clientpositive/llap/push_or.q.out b/ql/src/test/results/clientpositive/llap/push_or.q.out index 1ac850df8f..9df0ab7239 100644 --- a/ql/src/test/results/clientpositive/llap/push_or.q.out +++ b/ql/src/test/results/clientpositive/llap/push_or.q.out @@ -96,30 
+96,20 @@ STAGE PLANS: partition values: ds 2000-04-08 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.push_or - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct push_or { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -129,10 +119,8 @@ STAGE PLANS: name default.push_or partition_columns ds partition_columns.types string - serialization.ddl struct push_or { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.push_or name: default.push_or @@ -144,30 +132,20 @@ STAGE PLANS: partition values: ds 2000-04-09 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.push_or - numFiles 1 - numRows 20 partition_columns ds partition_columns.types string - rawDataSize 160 - serialization.ddl struct push_or { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 180 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -177,10 +155,8 @@ STAGE PLANS: name default.push_or partition_columns ds partition_columns.types string - serialization.ddl struct push_or { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.push_or name: default.push_or diff --git a/ql/src/test/results/clientpositive/llap/rand_partitionpruner2.q.out b/ql/src/test/results/clientpositive/llap/rand_partitionpruner2.q.out index ef5509281a..13249e91f8 100644 --- a/ql/src/test/results/clientpositive/llap/rand_partitionpruner2.q.out +++ b/ql/src/test/results/clientpositive/llap/rand_partitionpruner2.q.out @@ -65,8 +65,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"ds":"true","hr":"true","key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,hr,ds @@ -74,14 +72,8 @@ STAGE PLANS: columns.types string:string:string:string #### A masked pattern was here #### name default.tmptable_n1 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct tmptable_n1 { string key, string 
value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmptable_n1 TotalFiles: 1 @@ -120,30 +112,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -153,10 +135,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -169,30 +149,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -202,10 +172,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -258,8 +226,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"ds":"true","hr":"true","key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,hr,ds @@ -267,14 +233,8 @@ STAGE PLANS: columns.types string:string:string:string #### A masked pattern was here #### name default.tmptable_n1 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct 
tmptable_n1 { string key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmptable_n1 diff --git a/ql/src/test/results/clientpositive/llap/router_join_ppr.q.out b/ql/src/test/results/clientpositive/llap/router_join_ppr.q.out index de20bb6209..8180eb7759 100644 --- a/ql/src/test/results/clientpositive/llap/router_join_ppr.q.out +++ b/ql/src/test/results/clientpositive/llap/router_join_ppr.q.out @@ -82,30 +82,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -113,14 +103,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -166,30 +150,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -199,10 +173,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -215,30 +187,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 
properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -248,10 +210,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -427,30 +387,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -460,10 +410,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -476,30 +424,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 
column.name.delimiter , columns key,value @@ -509,10 +447,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -556,30 +492,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -587,14 +513,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -766,30 +686,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -797,14 +707,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ 
-850,30 +754,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -883,10 +777,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -899,30 +791,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -932,10 +814,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -1111,30 +991,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat 
properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1144,10 +1014,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -1160,30 +1028,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1193,10 +1051,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -1240,30 +1096,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1271,14 +1117,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src diff --git a/ql/src/test/results/clientpositive/llap/sample1.q.out b/ql/src/test/results/clientpositive/llap/sample1.q.out index 
81a821d906..b257edc45c 100644 --- a/ql/src/test/results/clientpositive/llap/sample1.q.out +++ b/ql/src/test/results/clientpositive/llap/sample1.q.out @@ -63,8 +63,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"dt":"true","hr":"true","key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,dt,hr @@ -72,14 +70,8 @@ STAGE PLANS: columns.types int:string:string:string #### A masked pattern was here #### name default.dest1_n89 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest1_n89 { i32 key, string value, string dt, string hr} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1_n89 TotalFiles: 1 @@ -118,30 +110,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -151,10 +133,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -206,8 +186,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"dt":"true","hr":"true","key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value,dt,hr @@ -215,14 +193,8 @@ STAGE PLANS: columns.types int:string:string:string #### A masked pattern was here #### name default.dest1_n89 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest1_n89 { i32 key, string value, string dt, string hr} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1_n89 diff --git a/ql/src/test/results/clientpositive/llap/sample10.q.out b/ql/src/test/results/clientpositive/llap/sample10.q.out index e1226296c9..5cb190cfc2 100644 --- a/ql/src/test/results/clientpositive/llap/sample10.q.out +++ b/ql/src/test/results/clientpositive/llap/sample10.q.out @@ -112,25 +112,17 @@ STAGE PLANS: ds 
2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 4 bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.srcpartbucket - numFiles 3 - numRows 10 partition_columns ds/hr partition_columns.types string:string - rawDataSize 60 - serialization.ddl struct srcpartbucket { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe - totalSize 295 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat @@ -147,10 +139,8 @@ STAGE PLANS: name default.srcpartbucket partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpartbucket { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.srcpartbucket name: default.srcpartbucket @@ -163,25 +153,17 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 4 bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.srcpartbucket - numFiles 3 - numRows 10 partition_columns ds/hr partition_columns.types string:string - rawDataSize 60 - serialization.ddl struct srcpartbucket { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe - totalSize 295 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat @@ -198,10 +180,8 @@ STAGE PLANS: name default.srcpartbucket partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpartbucket { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.srcpartbucket name: default.srcpartbucket @@ -214,25 +194,17 @@ STAGE PLANS: ds 2008-04-09 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 4 bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.srcpartbucket - numFiles 3 - numRows 10 partition_columns ds/hr partition_columns.types string:string - rawDataSize 60 - serialization.ddl struct srcpartbucket { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe - totalSize 295 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat @@ -249,10 +221,8 @@ STAGE PLANS: name default.srcpartbucket partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpartbucket { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.srcpartbucket name: default.srcpartbucket @@ -265,25 +235,17 @@ STAGE PLANS: ds 2008-04-09 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 4 bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.srcpartbucket - numFiles 3 - numRows 10 partition_columns ds/hr partition_columns.types string:string - rawDataSize 60 - serialization.ddl struct srcpartbucket { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe - totalSize 295 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat @@ -300,10 +262,8 @@ STAGE PLANS: name default.srcpartbucket partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpartbucket { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.srcpartbucket name: default.srcpartbucket diff --git a/ql/src/test/results/clientpositive/llap/sample5.q.out b/ql/src/test/results/clientpositive/llap/sample5.q.out index d36a43679f..e870e8c9b2 100644 --- a/ql/src/test/results/clientpositive/llap/sample5.q.out +++ b/ql/src/test/results/clientpositive/llap/sample5.q.out @@ -60,8 +60,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -69,14 +67,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.dest1_n69 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest1_n69 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1_n69 TotalFiles: 1 @@ -112,30 +104,21 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket - numFiles 2 - numRows 1000 - rawDataSize 10603 - serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11603 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key bucketing_version 2 @@ -145,14 +128,8 @@ STAGE PLANS: columns.types int:string 
#### A masked pattern was here #### name default.srcbucket - numFiles 2 - numRows 1000 - rawDataSize 10603 - serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11603 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket name: default.srcbucket @@ -204,8 +181,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -213,14 +188,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.dest1_n69 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest1_n69 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1_n69 diff --git a/ql/src/test/results/clientpositive/llap/sample6.q.out b/ql/src/test/results/clientpositive/llap/sample6.q.out index cb4756329d..71141024c2 100644 --- a/ql/src/test/results/clientpositive/llap/sample6.q.out +++ b/ql/src/test/results/clientpositive/llap/sample6.q.out @@ -59,8 +59,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -68,14 +66,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.dest1_n27 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest1_n27 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1_n27 TotalFiles: 1 @@ -111,30 +103,21 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket - numFiles 2 - numRows 1000 - rawDataSize 10603 - serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11603 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key bucketing_version 2 @@ -144,14 +127,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.srcbucket - numFiles 2 - numRows 1000 - rawDataSize 10603 - 
serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11603 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket name: default.srcbucket @@ -203,8 +180,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -212,14 +187,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.dest1_n27 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest1_n27 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1_n27 @@ -581,30 +550,21 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket - numFiles 2 - numRows 1000 - rawDataSize 10603 - serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11603 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key bucketing_version 2 @@ -614,14 +574,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.srcbucket - numFiles 2 - numRows 1000 - rawDataSize 10603 - serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11603 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket name: default.srcbucket @@ -1005,30 +959,21 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket - numFiles 2 - numRows 1000 - rawDataSize 10603 - serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11603 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key bucketing_version 2 @@ -1038,14 +983,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.srcbucket - numFiles 2 - numRows 1000 - rawDataSize 10603 - serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11603 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket name: default.srcbucket @@ -1652,30 +1591,21 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket - numFiles 2 - numRows 1000 - rawDataSize 10603 - serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11603 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key bucketing_version 2 @@ -1685,14 +1615,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.srcbucket - numFiles 2 - numRows 1000 - rawDataSize 10603 - serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11603 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket name: default.srcbucket @@ -2181,30 +2105,21 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket - numFiles 2 - numRows 1000 - rawDataSize 10603 - serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11603 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key bucketing_version 2 @@ -2214,14 +2129,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.srcbucket - numFiles 2 - numRows 1000 - rawDataSize 10603 - serialization.ddl struct srcbucket { i32 key, 
string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11603 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket name: default.srcbucket @@ -2650,30 +2559,21 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 4 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket2 - numFiles 4 - numRows 500 - rawDataSize 5312 - serialization.ddl struct srcbucket2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 4 bucket_field_name key bucketing_version 2 @@ -2683,14 +2583,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.srcbucket2 - numFiles 4 - numRows 500 - rawDataSize 5312 - serialization.ddl struct srcbucket2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket2 name: default.srcbucket2 @@ -3045,30 +2939,21 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 4 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket2 - numFiles 4 - numRows 500 - rawDataSize 5312 - serialization.ddl struct srcbucket2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 4 bucket_field_name key bucketing_version 2 @@ -3078,14 +2963,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.srcbucket2 - numFiles 4 - numRows 500 - rawDataSize 5312 - serialization.ddl struct srcbucket2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket2 name: default.srcbucket2 @@ -3331,30 +3210,21 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat 
properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.empty_bucket - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct empty_bucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key bucketing_version 2 @@ -3364,14 +3234,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.empty_bucket - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct empty_bucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.empty_bucket name: default.empty_bucket diff --git a/ql/src/test/results/clientpositive/llap/sample7.q.out b/ql/src/test/results/clientpositive/llap/sample7.q.out index 369a4c6ef4..e217faa782 100644 --- a/ql/src/test/results/clientpositive/llap/sample7.q.out +++ b/ql/src/test/results/clientpositive/llap/sample7.q.out @@ -61,8 +61,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -70,14 +68,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.dest1_n160 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest1_n160 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1_n160 TotalFiles: 1 @@ -113,30 +105,21 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket - numFiles 2 - numRows 1000 - rawDataSize 10603 - serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11603 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key bucketing_version 2 @@ 
-146,14 +129,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.srcbucket - numFiles 2 - numRows 1000 - rawDataSize 10603 - serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11603 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket name: default.srcbucket @@ -205,8 +182,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -214,14 +189,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.dest1_n160 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct dest1_n160 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1_n160 diff --git a/ql/src/test/results/clientpositive/llap/sample8.q.out b/ql/src/test/results/clientpositive/llap/sample8.q.out index cda918e8c4..3a1d2376bd 100644 --- a/ql/src/test/results/clientpositive/llap/sample8.q.out +++ b/ql/src/test/results/clientpositive/llap/sample8.q.out @@ -68,30 +68,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -101,10 +91,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -144,30 +132,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here 
#### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -177,10 +155,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -193,30 +169,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -226,10 +192,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -242,30 +206,20 @@ STAGE PLANS: ds 2008-04-09 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -275,10 +229,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -291,30 +243,20 @@ STAGE PLANS: ds 2008-04-09 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} 
- bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 1 - numRows 500 partition_columns ds/hr partition_columns.types string:string - rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -324,10 +266,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart diff --git a/ql/src/test/results/clientpositive/llap/sharedwork.q.out b/ql/src/test/results/clientpositive/llap/sharedwork.q.out index 175141fb9e..22b1cee6ac 100644 --- a/ql/src/test/results/clientpositive/llap/sharedwork.q.out +++ b/ql/src/test/results/clientpositive/llap/sharedwork.q.out @@ -168,30 +168,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"col_1":"true","col_20":"true","col_3":"true","col_7":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns col_1,col_3,col_7,col_20 - columns.comments columns.types string:timestamp:string:string #### A masked pattern was here #### name default.my_table_0001 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct my_table_0001 { string col_1, timestamp col_3, string col_7, string col_20} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"col_1":"true","col_20":"true","col_3":"true","col_7":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns col_1,col_3,col_7,col_20 @@ -199,14 +189,8 @@ STAGE PLANS: columns.types string:timestamp:string:string #### A masked pattern was here #### name default.my_table_0001 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct my_table_0001 { string col_1, timestamp col_3, string col_7, string col_20} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.my_table_0001 name: default.my_table_0001 @@ -268,30 +252,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"col_21":"true","col_24":"true"}} bucket_count -1 
bucketing_version 2 column.name.delimiter , columns col_24,col_21 - columns.comments columns.types string:string #### A masked pattern was here #### name default.my_table_0003 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct my_table_0003 { string col_24, string col_21} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"col_21":"true","col_24":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns col_24,col_21 @@ -299,14 +273,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.my_table_0003 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct my_table_0003 { string col_24, string col_21} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.my_table_0003 name: default.my_table_0003 @@ -349,30 +317,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"col_1":"true","col_22":"true","col_23":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns col_1,col_22,col_23 - columns.comments columns.types string:string:int #### A masked pattern was here #### name default.my_table_0001_00 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct my_table_0001_00 { string col_1, string col_22, i32 col_23} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"col_1":"true","col_22":"true","col_23":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns col_1,col_22,col_23 @@ -380,14 +338,8 @@ STAGE PLANS: columns.types string:string:int #### A masked pattern was here #### name default.my_table_0001_00 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct my_table_0001_00 { string col_1, string col_22, i32 col_23} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.my_table_0001_00 name: default.my_table_0001_00 @@ -429,30 +381,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"col_1":"true","col_100":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns col_1,col_100 - columns.comments columns.types string:string #### A masked pattern was here #### name default.my_table_0001_01 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct my_table_0001_01 { string 
col_1, string col_100}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- totalSize 0
-#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
properties:
- COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"col_1":"true","col_100":"true"}}
- bucket_count -1
bucketing_version 2
column.name.delimiter ,
columns col_1,col_100
@@ -460,14 +402,8 @@ STAGE PLANS:
columns.types string:string
#### A masked pattern was here ####
name default.my_table_0001_01
- numFiles 0
- numRows 0
- rawDataSize 0
- serialization.ddl struct my_table_0001_01 { string col_1, string col_100}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- totalSize 0
-#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.my_table_0001_01
name: default.my_table_0001_01
@@ -722,30 +658,20 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
properties:
- COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"p_brand":"true","p_comment":"true","p_container":"true","p_mfgr":"true","p_name":"true","p_partkey":"true","p_retailprice":"true","p_size":"true","p_type":"true"}}
bucket_count -1
bucketing_version 2
column.name.delimiter ,
columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
- columns.comments
columns.types int:string:string:string:string:int:string:double:string
#### A masked pattern was here ####
name default.part
- numFiles 1
- numRows 26
- rawDataSize 3147
- serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- totalSize 3173
-#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
properties:
- COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"p_brand":"true","p_comment":"true","p_container":"true","p_mfgr":"true","p_name":"true","p_partkey":"true","p_retailprice":"true","p_size":"true","p_type":"true"}}
- bucket_count -1
bucketing_version 2
column.name.delimiter ,
columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
@@ -753,14 +679,8 @@ STAGE PLANS:
columns.types int:string:string:string:string:int:string:double:string
#### A masked pattern was here ####
name default.part
- numFiles 1
- numRows 26
- rawDataSize 3147
- serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- totalSize 3173
-#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.part
name: default.part
@@ -834,30 +754,20 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
properties:
- COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"p_brand":"true","p_comment":"true","p_container":"true","p_mfgr":"true","p_name":"true","p_partkey":"true","p_retailprice":"true","p_size":"true","p_type":"true"}}
bucket_count -1
bucketing_version 2
column.name.delimiter ,
columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
- columns.comments
columns.types int:string:string:string:string:int:string:double:string
#### A masked pattern was here ####
name default.part
- numFiles 1
- numRows 26
- rawDataSize 3147
- serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- totalSize 3173
-#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
properties:
- COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"p_brand":"true","p_comment":"true","p_container":"true","p_mfgr":"true","p_name":"true","p_partkey":"true","p_retailprice":"true","p_size":"true","p_type":"true"}}
- bucket_count -1
bucketing_version 2
column.name.delimiter ,
columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
@@ -865,14 +775,8 @@ STAGE PLANS:
columns.types int:string:string:string:string:int:string:double:string
#### A masked pattern was here ####
name default.part
- numFiles 1
- numRows 26
- rawDataSize 3147
- serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- totalSize 3173
-#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.part
name: default.part
@@ -930,30 +834,20 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
properties:
- COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"p_brand":"true","p_comment":"true","p_container":"true","p_mfgr":"true","p_name":"true","p_partkey":"true","p_retailprice":"true","p_size":"true","p_type":"true"}}
bucket_count -1
bucketing_version 2
column.name.delimiter ,
columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
- columns.comments
columns.types int:string:string:string:string:int:string:double:string
#### A masked pattern was here ####
name default.part
- numFiles 1
- numRows 26
- rawDataSize 3147
- serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- totalSize 3173
-#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
properties:
- COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"p_brand":"true","p_comment":"true","p_container":"true","p_mfgr":"true","p_name":"true","p_partkey":"true","p_retailprice":"true","p_size":"true","p_type":"true"}}
- bucket_count -1
bucketing_version 2
column.name.delimiter ,
columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
@@ -961,14 +855,8 @@ STAGE PLANS:
columns.types int:string:string:string:string:int:string:double:string
#### A masked pattern was here ####
name default.part
- numFiles 1
- numRows 26
- rawDataSize 3147
- serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- totalSize 3173
-#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.part
name: default.part
diff --git a/ql/src/test/results/clientpositive/llap/smb_mapjoin_15.q.out b/ql/src/test/results/clientpositive/llap/smb_mapjoin_15.q.out
index dbc180ccae..c2299c7503 100644
--- a/ql/src/test/results/clientpositive/llap/smb_mapjoin_15.q.out
+++ b/ql/src/test/results/clientpositive/llap/smb_mapjoin_15.q.out
@@ -89,31 +89,22 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
properties:
- COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
SORTBUCKETCOLSPREFIX TRUE
bucket_count 16
bucket_field_name key
bucketing_version 2
column.name.delimiter ,
columns key,value
- columns.comments
columns.types int:string
#### A masked pattern was here ####
name default.test_table2_n4
- numFiles 16
- numRows 500
- rawDataSize 5312
- serialization.ddl struct test_table2_n4 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- totalSize 5812
-#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
properties:
- COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
SORTBUCKETCOLSPREFIX TRUE
bucket_count 16
bucket_field_name key
@@ -124,14 +115,8 @@ STAGE PLANS:
columns.types int:string
#### A masked pattern was here ####
name default.test_table2_n4
- numFiles 16
- numRows 500
- rawDataSize 5312
- serialization.ddl struct test_table2_n4 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- totalSize 5812
-#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.test_table2_n4
name: default.test_table2_n4
@@ -187,31 +172,22 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
properties:
- COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
SORTBUCKETCOLSPREFIX TRUE
bucket_count 16
bucket_field_name key
bucketing_version 2
column.name.delimiter ,
columns key,value
- columns.comments
columns.types int:string
#### A masked pattern was here ####
name default.test_table1_n4
- numFiles 16
- numRows 500
- rawDataSize 5312
- serialization.ddl struct test_table1_n4 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name key @@ -222,14 +198,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.test_table1_n4 - numFiles 16 - numRows 500 - rawDataSize 5312 - serialization.ddl struct test_table1_n4 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table1_n4 name: default.test_table1_n4 @@ -420,31 +390,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","key2":"true","value":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,key2,value - columns.comments columns.types int:int:string #### A masked pattern was here #### name default.test_table1_n4 - numFiles 16 - numRows 500 - rawDataSize 7218 - serialization.ddl struct test_table1_n4 { i32 key, i32 key2, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 7718 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","key2":"true","value":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name key @@ -455,14 +416,8 @@ STAGE PLANS: columns.types int:int:string #### A masked pattern was here #### name default.test_table1_n4 - numFiles 16 - numRows 500 - rawDataSize 7218 - serialization.ddl struct test_table1_n4 { i32 key, i32 key2, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 7718 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table1_n4 name: default.test_table1_n4 @@ -505,31 +460,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","key2":"true","value":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,key2,value - columns.comments columns.types int:int:string #### A masked pattern was here #### name default.test_table2_n4 - numFiles 16 - numRows 500 - rawDataSize 7218 - serialization.ddl struct test_table2_n4 { i32 key, i32 key2, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 7718 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: 
org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","key2":"true","value":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name key @@ -540,14 +486,8 @@ STAGE PLANS: columns.types int:int:string #### A masked pattern was here #### name default.test_table2_n4 - numFiles 16 - numRows 500 - rawDataSize 7218 - serialization.ddl struct test_table2_n4 { i32 key, i32 key2, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 7718 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table2_n4 name: default.test_table2_n4 @@ -717,31 +657,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","key2":"true","value":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,key2,value - columns.comments columns.types int:int:string #### A masked pattern was here #### name default.test_table1_n4 - numFiles 16 - numRows 500 - rawDataSize 7218 - serialization.ddl struct test_table1_n4 { i32 key, i32 key2, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 7718 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","key2":"true","value":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name key @@ -752,14 +683,8 @@ STAGE PLANS: columns.types int:int:string #### A masked pattern was here #### name default.test_table1_n4 - numFiles 16 - numRows 500 - rawDataSize 7218 - serialization.ddl struct test_table1_n4 { i32 key, i32 key2, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 7718 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table1_n4 name: default.test_table1_n4 @@ -802,31 +727,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","key2":"true","value":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,key2,value - columns.comments columns.types int:int:string #### A masked pattern was here #### name default.test_table2_n4 - numFiles 16 - numRows 500 - rawDataSize 7218 - serialization.ddl struct test_table2_n4 { i32 key, i32 key2, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 7718 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","key2":"true","value":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name key @@ -837,14 +753,8 @@ STAGE PLANS: columns.types int:int:string #### A masked pattern was here #### name default.test_table2_n4 - numFiles 16 - numRows 500 - rawDataSize 7218 - serialization.ddl struct test_table2_n4 { i32 key, i32 key2, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 7718 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table2_n4 name: default.test_table2_n4 @@ -1014,31 +924,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","key2":"true","value":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,key2,value - columns.comments columns.types int:int:string #### A masked pattern was here #### name default.test_table1_n4 - numFiles 16 - numRows 500 - rawDataSize 7218 - serialization.ddl struct test_table1_n4 { i32 key, i32 key2, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 7718 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","key2":"true","value":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name key @@ -1049,14 +950,8 @@ STAGE PLANS: columns.types int:int:string #### A masked pattern was here #### name default.test_table1_n4 - numFiles 16 - numRows 500 - rawDataSize 7218 - serialization.ddl struct test_table1_n4 { i32 key, i32 key2, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 7718 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table1_n4 name: default.test_table1_n4 @@ -1099,31 +994,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","key2":"true","value":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name key bucketing_version 2 column.name.delimiter , columns key,key2,value - columns.comments columns.types int:int:string #### A masked pattern was here #### name default.test_table2_n4 - numFiles 16 - numRows 500 - rawDataSize 7218 - serialization.ddl struct test_table2_n4 { i32 key, i32 key2, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 7718 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","key2":"true","value":"true"}} SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name key @@ -1134,14 +1020,8 @@ STAGE PLANS: columns.types 
int:int:string #### A masked pattern was here #### name default.test_table2_n4 - numFiles 16 - numRows 500 - rawDataSize 7218 - serialization.ddl struct test_table2_n4 { i32 key, i32 key2, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 7718 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table2_n4 name: default.test_table2_n4 diff --git a/ql/src/test/results/clientpositive/llap/stats0.q.out b/ql/src/test/results/clientpositive/llap/stats0.q.out index 695ed643ab..2b08b61c17 100644 --- a/ql/src/test/results/clientpositive/llap/stats0.q.out +++ b/ql/src/test/results/clientpositive/llap/stats0.q.out @@ -56,8 +56,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -65,14 +63,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.stats_non_partitioned - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct stats_non_partitioned { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.stats_non_partitioned TotalFiles: 1 @@ -108,30 +100,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -139,14 +121,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -198,8 +174,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -207,14 +181,8 @@ STAGE PLANS: columns.types string:string #### A masked 
pattern was here #### name default.stats_non_partitioned - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct stats_non_partitioned { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.stats_non_partitioned @@ -1506,8 +1474,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1515,14 +1481,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.stats_non_partitioned - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct stats_non_partitioned { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.stats_non_partitioned TotalFiles: 1 @@ -1558,30 +1518,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1589,14 +1539,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.src - numFiles 1 - numRows 500 - rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -1648,8 +1592,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -1657,14 +1599,8 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.stats_non_partitioned - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct stats_non_partitioned { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here 
#### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.stats_non_partitioned diff --git a/ql/src/test/results/clientpositive/llap/stats11.q.out b/ql/src/test/results/clientpositive/llap/stats11.q.out index 71a1d9da15..b69b8e2c79 100644 --- a/ql/src/test/results/clientpositive/llap/stats11.q.out +++ b/ql/src/test/results/clientpositive/llap/stats11.q.out @@ -371,18 +371,11 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_n15 - numFiles 2 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_n15 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -397,14 +390,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_n15 - numFiles 2 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_n15 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_n15 name: default.srcbucket_mapjoin_n15 @@ -453,20 +440,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_n16 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_n16 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -483,10 +463,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_n16 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_n16 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_n16 name: default.srcbucket_mapjoin_part_n16 @@ -521,8 +499,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -530,14 +506,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result_n7 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n7 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n7 TotalFiles: 1 @@ -608,8 +578,6 @@ STAGE 
PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -617,14 +585,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result_n7 - numFiles 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result_n7 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n7 @@ -823,18 +785,11 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_n15 - numFiles 2 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_n15 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -849,14 +804,8 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_n15 - numFiles 2 - numRows 0 - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_n15 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_n15 name: default.srcbucket_mapjoin_n15 @@ -905,20 +854,13 @@ STAGE PLANS: bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_n16 - numFiles 4 - numRows 0 partition_columns ds partition_columns.types string - rawDataSize 0 - serialization.ddl struct srcbucket_mapjoin_part_n16 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -935,10 +877,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_n16 partition_columns ds partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_n16 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_n16 name: default.srcbucket_mapjoin_part_n16 @@ -973,8 +913,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -982,14 +920,8 @@ STAGE PLANS: columns.types string:string:string #### A 
masked pattern was here #### name default.bucketmapjoin_tmp_result_n7 - numFiles 1 - numRows 464 - rawDataSize 8519 - serialization.ddl struct bucketmapjoin_tmp_result_n7 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 8983 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n7 TotalFiles: 1 @@ -1060,8 +992,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value1,value2 @@ -1069,14 +999,8 @@ STAGE PLANS: columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result_n7 - numFiles 1 - numRows 464 - rawDataSize 8519 - serialization.ddl struct bucketmapjoin_tmp_result_n7 { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 8983 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result_n7 diff --git a/ql/src/test/results/clientpositive/llap/stats12.q.out b/ql/src/test/results/clientpositive/llap/stats12.q.out index b82bb0bfcd..e160da1d54 100644 --- a/ql/src/test/results/clientpositive/llap/stats12.q.out +++ b/ql/src/test/results/clientpositive/llap/stats12.q.out @@ -80,27 +80,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.analyze_srcpart_n3 - numFiles 1 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct analyze_srcpart_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -110,10 +103,8 @@ STAGE PLANS: name default.analyze_srcpart_n3 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct analyze_srcpart_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.analyze_srcpart_n3 name: default.analyze_srcpart_n3 @@ -126,27 +117,20 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.analyze_srcpart_n3 - numFiles 1 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct analyze_srcpart_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -156,10 +140,8 @@ STAGE PLANS: name default.analyze_srcpart_n3 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct analyze_srcpart_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.analyze_srcpart_n3 name: default.analyze_srcpart_n3 diff --git a/ql/src/test/results/clientpositive/llap/stats13.q.out b/ql/src/test/results/clientpositive/llap/stats13.q.out index 6954cbd0b1..efd2b256d3 100644 --- a/ql/src/test/results/clientpositive/llap/stats13.q.out +++ b/ql/src/test/results/clientpositive/llap/stats13.q.out @@ -76,27 +76,20 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.analyze_srcpart - numFiles 1 partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct analyze_srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -106,10 +99,8 @@ STAGE PLANS: name default.analyze_srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct analyze_srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.analyze_srcpart name: default.analyze_srcpart diff --git a/ql/src/test/results/clientpositive/llap/temp_table_alter_partition_coltype.q.out b/ql/src/test/results/clientpositive/llap/temp_table_alter_partition_coltype.q.out index ead9709817..ea8d982154 100644 --- a/ql/src/test/results/clientpositive/llap/temp_table_alter_partition_coltype.q.out +++ b/ql/src/test/results/clientpositive/llap/temp_table_alter_partition_coltype.q.out @@ -193,30 +193,20 @@ STAGE PLANS: dt 100 ts 3.0 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.alter_coltype_temp - numFiles 1 - numRows 25 partition_columns dt/ts partition_columns.types string:double - rawDataSize 191 - serialization.ddl struct alter_coltype_temp { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -224,15 +214,10 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.alter_coltype_temp - numFiles 0 - numRows 0 partition_columns dt/ts partition_columns.types string:double - rawDataSize 0 - serialization.ddl struct alter_coltype_temp { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype_temp name: default.alter_coltype_temp @@ -245,30 +230,20 @@ STAGE PLANS: dt 100 ts 6.30 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.alter_coltype_temp - numFiles 1 - numRows 25 partition_columns dt/ts partition_columns.types string:double - rawDataSize 191 - serialization.ddl struct alter_coltype_temp { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -276,15 +251,10 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.alter_coltype_temp - numFiles 0 - numRows 0 partition_columns dt/ts partition_columns.types string:double - rawDataSize 0 - serialization.ddl struct alter_coltype_temp { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype_temp name: default.alter_coltype_temp @@ -412,30 +382,20 @@ STAGE PLANS: dt 100 ts 3.0 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.alter_coltype_temp - numFiles 1 - numRows 25 partition_columns dt/ts partition_columns.types string:double - rawDataSize 191 - serialization.ddl struct alter_coltype_temp { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -443,15 +403,10 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.alter_coltype_temp - numFiles 0 - numRows 0 partition_columns dt/ts partition_columns.types string:double - rawDataSize 0 - serialization.ddl struct alter_coltype_temp { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype_temp name: default.alter_coltype_temp @@ -464,30 +419,20 @@ STAGE PLANS: dt 100 ts 6.30 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.alter_coltype_temp - numFiles 1 - numRows 25 partition_columns dt/ts partition_columns.types string:double - rawDataSize 191 - serialization.ddl struct alter_coltype_temp { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -495,15 +440,10 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.alter_coltype_temp - numFiles 0 - numRows 0 partition_columns dt/ts partition_columns.types string:double - rawDataSize 0 - serialization.ddl struct alter_coltype_temp { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype_temp name: default.alter_coltype_temp @@ -641,30 +581,20 @@ STAGE PLANS: dt 100 ts 3.0 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.alter_coltype_temp - numFiles 1 - numRows 25 partition_columns dt/ts partition_columns.types string:double - rawDataSize 191 - serialization.ddl struct alter_coltype_temp { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -672,15 +602,10 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.alter_coltype_temp - numFiles 0 - numRows 0 partition_columns dt/ts partition_columns.types string:double - rawDataSize 0 - serialization.ddl struct alter_coltype_temp { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype_temp name: default.alter_coltype_temp @@ -691,30 +616,20 @@ STAGE PLANS: dt 100 ts 6.30 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name 
default.alter_coltype_temp - numFiles 1 - numRows 25 partition_columns dt/ts partition_columns.types string:double - rawDataSize 191 - serialization.ddl struct alter_coltype_temp { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 216 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -722,15 +637,10 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.alter_coltype_temp - numFiles 0 - numRows 0 partition_columns dt/ts partition_columns.types string:double - rawDataSize 0 - serialization.ddl struct alter_coltype_temp { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype_temp name: default.alter_coltype_temp @@ -895,30 +805,20 @@ STAGE PLANS: partcol1 1 partcol2 1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} - bucket_count -1 column.name.delimiter , columns intcol - columns.comments columns.types string #### A masked pattern was here #### name pt.alterdynamic_part_table_temp - numFiles 2 - numRows 2 partition_columns partcol1/partcol2 partition_columns.types int:string - rawDataSize 3 - serialization.ddl struct alterdynamic_part_table_temp { string intcol} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"intcol":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns intcol @@ -926,15 +826,10 @@ STAGE PLANS: columns.types string #### A masked pattern was here #### name pt.alterdynamic_part_table_temp - numFiles 0 - numRows 0 partition_columns partcol1/partcol2 partition_columns.types int:string - rawDataSize 0 - serialization.ddl struct alterdynamic_part_table_temp { string intcol} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: pt.alterdynamic_part_table_temp name: pt.alterdynamic_part_table_temp @@ -976,30 +871,20 @@ STAGE PLANS: partcol1 2 partcol2 1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} - bucket_count -1 column.name.delimiter , columns intcol - columns.comments columns.types string #### A masked pattern was here #### name pt.alterdynamic_part_table_temp - numFiles 1 - numRows 1 partition_columns partcol1/partcol2 partition_columns.types int:string - rawDataSize 1 - serialization.ddl struct alterdynamic_part_table_temp { string intcol} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"intcol":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns intcol @@ -1007,15 +892,10 @@ STAGE PLANS: columns.types string #### A masked pattern was here #### name pt.alterdynamic_part_table_temp - numFiles 0 - numRows 0 partition_columns partcol1/partcol2 partition_columns.types int:string - rawDataSize 0 - serialization.ddl struct alterdynamic_part_table_temp { string intcol} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: pt.alterdynamic_part_table_temp name: pt.alterdynamic_part_table_temp diff --git a/ql/src/test/results/clientpositive/llap/temp_table_display_colstats_tbllvl.q.out b/ql/src/test/results/clientpositive/llap/temp_table_display_colstats_tbllvl.q.out index 29fb49bdbd..e52adde9cd 100644 --- a/ql/src/test/results/clientpositive/llap/temp_table_display_colstats_tbllvl.q.out +++ b/ql/src/test/results/clientpositive/llap/temp_table_display_colstats_tbllvl.q.out @@ -288,25 +288,18 @@ STAGE PLANS: bucketing_version 2 column.name.delimiter , columns sourceip,desturl,visitdate,adrevenue,useragent,ccode,lcode,skeyword,avgtimeonsite - columns.comments columns.types string:string:string:float:string:string:string:string:int field.delim | #### A masked pattern was here #### name default.uservisits_web_text_none - numFiles 1 - numRows 0 - rawDataSize 0 - serialization.ddl struct uservisits_web_text_none { string sourceip, string desturl, string visitdate, float adrevenue, string useragent, string ccode, string lcode, string skeyword, i32 avgtimeonsite} serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 7060 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: EXTERNAL TRUE - bucket_count -1 bucketing_version 2 column.name.delimiter , columns sourceip,desturl,visitdate,adrevenue,useragent,ccode,lcode,skeyword,avgtimeonsite @@ -315,13 +308,8 @@ STAGE PLANS: field.delim | #### A masked pattern was here #### name default.uservisits_web_text_none - numFiles 1 - numRows 0 - rawDataSize 0 - serialization.ddl struct uservisits_web_text_none { string sourceip, string desturl, string visitdate, float adrevenue, string useragent, string ccode, string lcode, string skeyword, i32 avgtimeonsite} serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 7060 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.uservisits_web_text_none name: default.uservisits_web_text_none diff --git a/ql/src/test/results/clientpositive/llap/tez_fixed_bucket_pruning.q.out b/ql/src/test/results/clientpositive/llap/tez_fixed_bucket_pruning.q.out index bbb7d37fee..19c69b8c87 100644 --- a/ql/src/test/results/clientpositive/llap/tez_fixed_bucket_pruning.q.out +++ b/ql/src/test/results/clientpositive/llap/tez_fixed_bucket_pruning.q.out @@ -579,30 +579,20 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"business_dept_object_id":"true","business_partner_percentage":"true","charge_code_object_id":"true","date_key":"true","fy_month_key":"true","fy_quarter_key":"true","fy_year_key":"true","month_key":"true","plan_detail_object_id":"true","project_object_id":"true","quarter_key":"true","resource_object_id":"true","slice_date":"true","split_amount":"true","split_units":"true","supplier_object_id":"true","transclass_object_id":"true","week_key":"true","year_key":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns plan_detail_object_id,project_object_id,charge_code_object_id,transclass_object_id,resource_object_id,slice_date,split_amount,split_units,year_key,quarter_key,month_key,week_key,date_key,fy_year_key,fy_quarter_key,fy_month_key,supplier_object_id,business_dept_object_id,business_partner_percentage - columns.comments columns.types bigint:bigint:bigint:bigint:bigint:varchar(50):varchar(50):varchar(50):varchar(20):varchar(20):varchar(50):varchar(50):varchar(50):varchar(50):string:string:bigint:bigint:decimal(38,8) #### A masked pattern was here #### name default.l3_clarity__l3_monthly_dw_factplan_dw_stg_2018022300104_1 - numFiles 1 - numRows 15 - rawDataSize 16430 - serialization.ddl struct l3_clarity__l3_monthly_dw_factplan_dw_stg_2018022300104_1 { i64 plan_detail_object_id, i64 project_object_id, i64 charge_code_object_id, i64 transclass_object_id, i64 resource_object_id, varchar(50) slice_date, varchar(50) split_amount, varchar(50) split_units, varchar(20) year_key, varchar(20) quarter_key, varchar(50) month_key, varchar(50) week_key, varchar(50) date_key, varchar(50) fy_year_key, string fy_quarter_key, string fy_month_key, i64 supplier_object_id, i64 business_dept_object_id, decimal(38,8) business_partner_percentage} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 3483 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"business_dept_object_id":"true","business_partner_percentage":"true","charge_code_object_id":"true","date_key":"true","fy_month_key":"true","fy_quarter_key":"true","fy_year_key":"true","month_key":"true","plan_detail_object_id":"true","project_object_id":"true","quarter_key":"true","resource_object_id":"true","slice_date":"true","split_amount":"true","split_units":"true","supplier_object_id":"true","transclass_object_id":"true","week_key":"true","year_key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns plan_detail_object_id,project_object_id,charge_code_object_id,transclass_object_id,resource_object_id,slice_date,split_amount,split_units,year_key,quarter_key,month_key,week_key,date_key,fy_year_key,fy_quarter_key,fy_month_key,supplier_object_id,business_dept_object_id,business_partner_percentage @@ -610,14 +600,8 @@ STAGE PLANS: columns.types bigint:bigint:bigint:bigint:bigint:varchar(50):varchar(50):varchar(50):varchar(20):varchar(20):varchar(50):varchar(50):varchar(50):varchar(50):string:string:bigint:bigint:decimal(38,8) #### A masked pattern was here #### name default.l3_clarity__l3_monthly_dw_factplan_dw_stg_2018022300104_1 - numFiles 1 - numRows 15 - rawDataSize 16430 - serialization.ddl struct l3_clarity__l3_monthly_dw_factplan_dw_stg_2018022300104_1 { i64 plan_detail_object_id, i64 
project_object_id, i64 charge_code_object_id, i64 transclass_object_id, i64 resource_object_id, varchar(50) slice_date, varchar(50) split_amount, varchar(50) split_units, varchar(20) year_key, varchar(20) quarter_key, varchar(50) month_key, varchar(50) week_key, varchar(50) date_key, varchar(50) fy_year_key, string fy_quarter_key, string fy_month_key, i64 supplier_object_id, i64 business_dept_object_id, decimal(38,8) business_partner_percentage} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 3483 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.l3_clarity__l3_monthly_dw_factplan_dw_stg_2018022300104_1 name: default.l3_clarity__l3_monthly_dw_factplan_dw_stg_2018022300104_1 @@ -653,30 +637,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"l3_snapshot_number":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns l3_snapshot_number - columns.comments columns.types bigint #### A masked pattern was here #### name default.l3_clarity__l3_snap_number_2018022300104 - numFiles 1 - numRows 1 - rawDataSize 6 - serialization.ddl struct l3_clarity__l3_snap_number_2018022300104 { i64 l3_snapshot_number} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 7 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"l3_snapshot_number":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns l3_snapshot_number @@ -684,14 +658,8 @@ STAGE PLANS: columns.types bigint #### A masked pattern was here #### name default.l3_clarity__l3_snap_number_2018022300104 - numFiles 1 - numRows 1 - rawDataSize 6 - serialization.ddl struct l3_clarity__l3_snap_number_2018022300104 { i64 l3_snapshot_number} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 7 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.l3_clarity__l3_snap_number_2018022300104 name: default.l3_clarity__l3_snap_number_2018022300104 @@ -734,30 +702,21 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"bmo_cost_type":"true","bmo_fiscal_year":"true","charge_code_key":"true","charge_code_object_id":"true","clarity_updated_date":"true","finplan_detail_object_id":"true","idp_audit_id":"true","idp_data_date":"true","idp_warehouse_id":"true","is_latest_snapshot":"true","l3_created_date":"true","l3_snapshot_number":"true","last_updated_by":"true","latest_fiscal_budget_plan":"true","percentage":"true","period_end":"true","period_start":"true","period_type":"true","plan_category":"true","plan_code":"true","plan_description":"true","plan_key":"true","plan_name":"true","plan_of_record":"true","plan_status":"true","plan_type":"true","project_key":"true","project_object_id":"true","resoruce_object_id":"true","resource_key":"true","transclass_key":"true","txn_class_object_id":"true"}} bucket_count 64 
bucket_field_name idp_data_date bucketing_version 2 column.name.delimiter , columns idp_warehouse_id,idp_audit_id,idp_data_date,l3_snapshot_number,plan_key,project_key,charge_code_key,transclass_key,resource_key,finplan_detail_object_id,project_object_id,txn_class_object_id,charge_code_object_id,resoruce_object_id,plan_name,plan_code,plan_type,period_type,plan_description,plan_status,period_start,period_end,plan_of_record,percentage,l3_created_date,bmo_cost_type,bmo_fiscal_year,clarity_updated_date,is_latest_snapshot,latest_fiscal_budget_plan,plan_category,last_updated_by - columns.comments columns.types bigint:bigint:date:bigint:bigint:bigint:bigint:bigint:bigint:bigint:bigint:bigint:bigint:bigint:varchar(1500):varchar(500):varchar(50):varchar(50):varchar(3000):varchar(50):varchar(50):varchar(50):varchar(1):decimal(32,6):timestamp:varchar(30):varchar(50):timestamp:bigint:bigint:varchar(70):varchar(250) #### A masked pattern was here #### name default.l3_monthly_dw_dimplan - numFiles 1 - numRows 180340 - rawDataSize 269826156 - serialization.ddl struct l3_monthly_dw_dimplan { i64 idp_warehouse_id, i64 idp_audit_id, date idp_data_date, i64 l3_snapshot_number, i64 plan_key, i64 project_key, i64 charge_code_key, i64 transclass_key, i64 resource_key, i64 finplan_detail_object_id, i64 project_object_id, i64 txn_class_object_id, i64 charge_code_object_id, i64 resoruce_object_id, varchar(1500) plan_name, varchar(500) plan_code, varchar(50) plan_type, varchar(50) period_type, varchar(3000) plan_description, varchar(50) plan_status, varchar(50) period_start, varchar(50) period_end, varchar(1) plan_of_record, decimal(32,6) percentage, timestamp l3_created_date, varchar(30) bmo_cost_type, varchar(50) bmo_fiscal_year, timestamp clarity_updated_date, i64 is_latest_snapshot, i64 latest_fiscal_budget_plan, varchar(70) plan_category, varchar(250) last_updated_by} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 5242699 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"bmo_cost_type":"true","bmo_fiscal_year":"true","charge_code_key":"true","charge_code_object_id":"true","clarity_updated_date":"true","finplan_detail_object_id":"true","idp_audit_id":"true","idp_data_date":"true","idp_warehouse_id":"true","is_latest_snapshot":"true","l3_created_date":"true","l3_snapshot_number":"true","last_updated_by":"true","latest_fiscal_budget_plan":"true","percentage":"true","period_end":"true","period_start":"true","period_type":"true","plan_category":"true","plan_code":"true","plan_description":"true","plan_key":"true","plan_name":"true","plan_of_record":"true","plan_status":"true","plan_type":"true","project_key":"true","project_object_id":"true","resoruce_object_id":"true","resource_key":"true","transclass_key":"true","txn_class_object_id":"true"}} bucket_count 64 bucket_field_name idp_data_date bucketing_version 2 @@ -767,14 +726,8 @@ STAGE PLANS: columns.types bigint:bigint:date:bigint:bigint:bigint:bigint:bigint:bigint:bigint:bigint:bigint:bigint:bigint:varchar(1500):varchar(500):varchar(50):varchar(50):varchar(3000):varchar(50):varchar(50):varchar(50):varchar(1):decimal(32,6):timestamp:varchar(30):varchar(50):timestamp:bigint:bigint:varchar(70):varchar(250) #### A masked pattern was here #### name default.l3_monthly_dw_dimplan - 
numFiles 1 - numRows 180340 - rawDataSize 269826156 - serialization.ddl struct l3_monthly_dw_dimplan { i64 idp_warehouse_id, i64 idp_audit_id, date idp_data_date, i64 l3_snapshot_number, i64 plan_key, i64 project_key, i64 charge_code_key, i64 transclass_key, i64 resource_key, i64 finplan_detail_object_id, i64 project_object_id, i64 txn_class_object_id, i64 charge_code_object_id, i64 resoruce_object_id, varchar(1500) plan_name, varchar(500) plan_code, varchar(50) plan_type, varchar(50) period_type, varchar(3000) plan_description, varchar(50) plan_status, varchar(50) period_start, varchar(50) period_end, varchar(1) plan_of_record, decimal(32,6) percentage, timestamp l3_created_date, varchar(30) bmo_cost_type, varchar(50) bmo_fiscal_year, timestamp clarity_updated_date, i64 is_latest_snapshot, i64 latest_fiscal_budget_plan, varchar(70) plan_category, varchar(250) last_updated_by} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 5242699 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.l3_monthly_dw_dimplan name: default.l3_monthly_dw_dimplan @@ -817,30 +770,20 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"idp_data_date":"true","l3_created_date":"true","l3_snapshot_number":"true","project_key":"true","project_object_id":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns project_key,l3_snapshot_number,l3_created_date,project_object_id,idp_data_date - columns.comments columns.types bigint:bigint:timestamp:bigint:date #### A masked pattern was here #### name default.l3_clarity__l3_monthly_dw_factplan_datajoin_1_s2_2018022300104_1 - numFiles 1 - numRows 1 - rawDataSize 120 - serialization.ddl struct l3_clarity__l3_monthly_dw_factplan_datajoin_1_s2_2018022300104_1 { i64 project_key, i64 l3_snapshot_number, timestamp l3_created_date, i64 project_object_id, date idp_data_date} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 677 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"idp_data_date":"true","l3_created_date":"true","l3_snapshot_number":"true","project_key":"true","project_object_id":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns project_key,l3_snapshot_number,l3_created_date,project_object_id,idp_data_date @@ -848,14 +791,8 @@ STAGE PLANS: columns.types bigint:bigint:timestamp:bigint:date #### A masked pattern was here #### name default.l3_clarity__l3_monthly_dw_factplan_datajoin_1_s2_2018022300104_1 - numFiles 1 - numRows 1 - rawDataSize 120 - serialization.ddl struct l3_clarity__l3_monthly_dw_factplan_datajoin_1_s2_2018022300104_1 { i64 project_key, i64 l3_snapshot_number, timestamp l3_created_date, i64 project_object_id, date idp_data_date} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 677 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.l3_clarity__l3_monthly_dw_factplan_datajoin_1_s2_2018022300104_1 name: default.l3_clarity__l3_monthly_dw_factplan_datajoin_1_s2_2018022300104_1 @@ -1104,30 
+1041,20 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"business_dept_object_id":"true","business_partner_percentage":"true","charge_code_object_id":"true","date_key":"true","fy_month_key":"true","fy_quarter_key":"true","fy_year_key":"true","month_key":"true","plan_detail_object_id":"true","project_object_id":"true","quarter_key":"true","resource_object_id":"true","slice_date":"true","split_amount":"true","split_units":"true","supplier_object_id":"true","transclass_object_id":"true","week_key":"true","year_key":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns plan_detail_object_id,project_object_id,charge_code_object_id,transclass_object_id,resource_object_id,slice_date,split_amount,split_units,year_key,quarter_key,month_key,week_key,date_key,fy_year_key,fy_quarter_key,fy_month_key,supplier_object_id,business_dept_object_id,business_partner_percentage - columns.comments columns.types bigint:bigint:bigint:bigint:bigint:varchar(50):varchar(50):varchar(50):varchar(20):varchar(20):varchar(50):varchar(50):varchar(50):varchar(50):string:string:bigint:bigint:decimal(38,8) #### A masked pattern was here #### name default.l3_clarity__l3_monthly_dw_factplan_dw_stg_2018022300104_1 - numFiles 1 - numRows 15 - rawDataSize 16430 - serialization.ddl struct l3_clarity__l3_monthly_dw_factplan_dw_stg_2018022300104_1 { i64 plan_detail_object_id, i64 project_object_id, i64 charge_code_object_id, i64 transclass_object_id, i64 resource_object_id, varchar(50) slice_date, varchar(50) split_amount, varchar(50) split_units, varchar(20) year_key, varchar(20) quarter_key, varchar(50) month_key, varchar(50) week_key, varchar(50) date_key, varchar(50) fy_year_key, string fy_quarter_key, string fy_month_key, i64 supplier_object_id, i64 business_dept_object_id, decimal(38,8) business_partner_percentage} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 3483 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"business_dept_object_id":"true","business_partner_percentage":"true","charge_code_object_id":"true","date_key":"true","fy_month_key":"true","fy_quarter_key":"true","fy_year_key":"true","month_key":"true","plan_detail_object_id":"true","project_object_id":"true","quarter_key":"true","resource_object_id":"true","slice_date":"true","split_amount":"true","split_units":"true","supplier_object_id":"true","transclass_object_id":"true","week_key":"true","year_key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns plan_detail_object_id,project_object_id,charge_code_object_id,transclass_object_id,resource_object_id,slice_date,split_amount,split_units,year_key,quarter_key,month_key,week_key,date_key,fy_year_key,fy_quarter_key,fy_month_key,supplier_object_id,business_dept_object_id,business_partner_percentage @@ -1135,14 +1062,8 @@ STAGE PLANS: columns.types bigint:bigint:bigint:bigint:bigint:varchar(50):varchar(50):varchar(50):varchar(20):varchar(20):varchar(50):varchar(50):varchar(50):varchar(50):string:string:bigint:bigint:decimal(38,8) #### A masked pattern was here #### name 
default.l3_clarity__l3_monthly_dw_factplan_dw_stg_2018022300104_1 - numFiles 1 - numRows 15 - rawDataSize 16430 - serialization.ddl struct l3_clarity__l3_monthly_dw_factplan_dw_stg_2018022300104_1 { i64 plan_detail_object_id, i64 project_object_id, i64 charge_code_object_id, i64 transclass_object_id, i64 resource_object_id, varchar(50) slice_date, varchar(50) split_amount, varchar(50) split_units, varchar(20) year_key, varchar(20) quarter_key, varchar(50) month_key, varchar(50) week_key, varchar(50) date_key, varchar(50) fy_year_key, string fy_quarter_key, string fy_month_key, i64 supplier_object_id, i64 business_dept_object_id, decimal(38,8) business_partner_percentage} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 3483 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.l3_clarity__l3_monthly_dw_factplan_dw_stg_2018022300104_1 name: default.l3_clarity__l3_monthly_dw_factplan_dw_stg_2018022300104_1 @@ -1178,30 +1099,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"l3_snapshot_number":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns l3_snapshot_number - columns.comments columns.types bigint #### A masked pattern was here #### name default.l3_clarity__l3_snap_number_2018022300104 - numFiles 1 - numRows 1 - rawDataSize 6 - serialization.ddl struct l3_clarity__l3_snap_number_2018022300104 { i64 l3_snapshot_number} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 7 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"l3_snapshot_number":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns l3_snapshot_number @@ -1209,14 +1120,8 @@ STAGE PLANS: columns.types bigint #### A masked pattern was here #### name default.l3_clarity__l3_snap_number_2018022300104 - numFiles 1 - numRows 1 - rawDataSize 6 - serialization.ddl struct l3_clarity__l3_snap_number_2018022300104 { i64 l3_snapshot_number} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 7 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.l3_clarity__l3_snap_number_2018022300104 name: default.l3_clarity__l3_snap_number_2018022300104 @@ -1259,30 +1164,21 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"bmo_cost_type":"true","bmo_fiscal_year":"true","charge_code_key":"true","charge_code_object_id":"true","clarity_updated_date":"true","finplan_detail_object_id":"true","idp_audit_id":"true","idp_data_date":"true","idp_warehouse_id":"true","is_latest_snapshot":"true","l3_created_date":"true","l3_snapshot_number":"true","last_updated_by":"true","latest_fiscal_budget_plan":"true","percentage":"true","period_end":"true","period_start":"true","period_type":"true","plan_category":"true","plan_code":"true","plan_description":"true","plan_key":"true","plan_name":"true","plan_of_record":"true","plan_status":"true","plan_type":"true","project_key":"true","project_object_id":"true","resoruce_object_id":"true","resource_key":"true","transclass_key":"true","txn_class_object_id":"true"}} bucket_count 64 bucket_field_name idp_data_date bucketing_version 2 column.name.delimiter , columns idp_warehouse_id,idp_audit_id,idp_data_date,l3_snapshot_number,plan_key,project_key,charge_code_key,transclass_key,resource_key,finplan_detail_object_id,project_object_id,txn_class_object_id,charge_code_object_id,resoruce_object_id,plan_name,plan_code,plan_type,period_type,plan_description,plan_status,period_start,period_end,plan_of_record,percentage,l3_created_date,bmo_cost_type,bmo_fiscal_year,clarity_updated_date,is_latest_snapshot,latest_fiscal_budget_plan,plan_category,last_updated_by - columns.comments columns.types bigint:bigint:date:bigint:bigint:bigint:bigint:bigint:bigint:bigint:bigint:bigint:bigint:bigint:varchar(1500):varchar(500):varchar(50):varchar(50):varchar(3000):varchar(50):varchar(50):varchar(50):varchar(1):decimal(32,6):timestamp:varchar(30):varchar(50):timestamp:bigint:bigint:varchar(70):varchar(250) #### A masked pattern was here #### name default.l3_monthly_dw_dimplan - numFiles 1 - numRows 180340 - rawDataSize 269826156 - serialization.ddl struct l3_monthly_dw_dimplan { i64 idp_warehouse_id, i64 idp_audit_id, date idp_data_date, i64 l3_snapshot_number, i64 plan_key, i64 project_key, i64 charge_code_key, i64 transclass_key, i64 resource_key, i64 finplan_detail_object_id, i64 project_object_id, i64 txn_class_object_id, i64 charge_code_object_id, i64 resoruce_object_id, varchar(1500) plan_name, varchar(500) plan_code, varchar(50) plan_type, varchar(50) period_type, varchar(3000) plan_description, varchar(50) plan_status, varchar(50) period_start, varchar(50) period_end, varchar(1) plan_of_record, decimal(32,6) percentage, timestamp l3_created_date, varchar(30) bmo_cost_type, varchar(50) bmo_fiscal_year, timestamp clarity_updated_date, i64 is_latest_snapshot, i64 latest_fiscal_budget_plan, varchar(70) plan_category, varchar(250) last_updated_by} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 5242699 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"bmo_cost_type":"true","bmo_fiscal_year":"true","charge_code_key":"true","charge_code_object_id":"true","clarity_updated_date":"true","finplan_detail_object_id":"true","idp_audit_id":"true","idp_data_date":"true","idp_warehouse_id":"true","is_latest_snapshot":"true","l3_created_date":"true","l3_snapshot_number":"true","last_updated_by":"true","latest_fiscal_budget_plan":"true","percentage":"true","period_end":"true","period_start":"true","period_type":"true","plan_category":"true","plan_code":"true","plan_description":"true","plan_key":"true","plan_name":"true","plan_of_record":"true","plan_status":"true","plan_type":"true","project_key":"true","project_object_id":"true","resoruce_object_id":"true","resource_key":"true","transclass_key":"true","txn_class_object_id":"true"}} bucket_count 64 bucket_field_name idp_data_date bucketing_version 2 @@ -1292,14 +1188,8 @@ STAGE PLANS: columns.types bigint:bigint:date:bigint:bigint:bigint:bigint:bigint:bigint:bigint:bigint:bigint:bigint:bigint:varchar(1500):varchar(500):varchar(50):varchar(50):varchar(3000):varchar(50):varchar(50):varchar(50):varchar(1):decimal(32,6):timestamp:varchar(30):varchar(50):timestamp:bigint:bigint:varchar(70):varchar(250) #### A masked pattern was here #### name default.l3_monthly_dw_dimplan - numFiles 1 - numRows 180340 - rawDataSize 269826156 - serialization.ddl struct l3_monthly_dw_dimplan { i64 idp_warehouse_id, i64 idp_audit_id, date idp_data_date, i64 l3_snapshot_number, i64 plan_key, i64 project_key, i64 charge_code_key, i64 transclass_key, i64 resource_key, i64 finplan_detail_object_id, i64 project_object_id, i64 txn_class_object_id, i64 charge_code_object_id, i64 resoruce_object_id, varchar(1500) plan_name, varchar(500) plan_code, varchar(50) plan_type, varchar(50) period_type, varchar(3000) plan_description, varchar(50) plan_status, varchar(50) period_start, varchar(50) period_end, varchar(1) plan_of_record, decimal(32,6) percentage, timestamp l3_created_date, varchar(30) bmo_cost_type, varchar(50) bmo_fiscal_year, timestamp clarity_updated_date, i64 is_latest_snapshot, i64 latest_fiscal_budget_plan, varchar(70) plan_category, varchar(250) last_updated_by} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 5242699 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.l3_monthly_dw_dimplan name: default.l3_monthly_dw_dimplan @@ -1342,30 +1232,20 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"idp_data_date":"true","l3_created_date":"true","l3_snapshot_number":"true","project_key":"true","project_object_id":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns project_key,l3_snapshot_number,l3_created_date,project_object_id,idp_data_date - columns.comments columns.types bigint:bigint:timestamp:bigint:date #### A masked pattern was here #### name default.l3_clarity__l3_monthly_dw_factplan_datajoin_1_s2_2018022300104_1 - numFiles 1 - numRows 1 - rawDataSize 120 - serialization.ddl struct l3_clarity__l3_monthly_dw_factplan_datajoin_1_s2_2018022300104_1 { i64 project_key, i64 l3_snapshot_number, timestamp l3_created_date, i64 project_object_id, date idp_data_date} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 677 -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.ql.io.orc.OrcSerde input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"idp_data_date":"true","l3_created_date":"true","l3_snapshot_number":"true","project_key":"true","project_object_id":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns project_key,l3_snapshot_number,l3_created_date,project_object_id,idp_data_date @@ -1373,14 +1253,8 @@ STAGE PLANS: columns.types bigint:bigint:timestamp:bigint:date #### A masked pattern was here #### name default.l3_clarity__l3_monthly_dw_factplan_datajoin_1_s2_2018022300104_1 - numFiles 1 - numRows 1 - rawDataSize 120 - serialization.ddl struct l3_clarity__l3_monthly_dw_factplan_datajoin_1_s2_2018022300104_1 { i64 project_key, i64 l3_snapshot_number, timestamp l3_created_date, i64 project_object_id, date idp_data_date} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 677 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.l3_clarity__l3_monthly_dw_factplan_datajoin_1_s2_2018022300104_1 name: default.l3_clarity__l3_monthly_dw_factplan_datajoin_1_s2_2018022300104_1 @@ -1567,30 +1441,21 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"col_1":"true","col_2":"true","col_3":"true"}} bucket_count 4 bucket_field_name col_1 bucketing_version 2 column.name.delimiter , columns col_1,col_2,col_3 - columns.comments columns.types int:string:string #### A masked pattern was here #### name default.test_table - numFiles 3 - numRows 4 - rawDataSize 42 - serialization.ddl struct test_table { i32 col_1, string col_2, string col_3} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 46 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"col_1":"true","col_2":"true","col_3":"true"}} bucket_count 4 bucket_field_name col_1 bucketing_version 2 @@ -1600,14 +1465,8 @@ STAGE PLANS: columns.types int:string:string #### A masked pattern was here #### name default.test_table - numFiles 3 - numRows 4 - rawDataSize 42 - serialization.ddl struct test_table { i32 col_1, string col_2, string col_3} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 46 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table name: default.test_table @@ -1725,30 +1584,21 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"col_1":"true","col_2":"true","col_3":"true"}} bucket_count 4 bucket_field_name col_1 bucketing_version 2 column.name.delimiter , columns col_1,col_2,col_3 - columns.comments columns.types int:string:string #### A masked pattern was here #### name default.test_table - numFiles 3 - numRows 4 - rawDataSize 42 - serialization.ddl struct test_table { i32 col_1, string 
col_2, string col_3} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 46 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"col_1":"true","col_2":"true","col_3":"true"}} bucket_count 4 bucket_field_name col_1 bucketing_version 2 @@ -1758,14 +1608,8 @@ STAGE PLANS: columns.types int:string:string #### A masked pattern was here #### name default.test_table - numFiles 3 - numRows 4 - rawDataSize 42 - serialization.ddl struct test_table { i32 col_1, string col_2, string col_3} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 46 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table name: default.test_table diff --git a/ql/src/test/results/clientpositive/llap/topnkey_windowing.q.out b/ql/src/test/results/clientpositive/llap/topnkey_windowing.q.out index 6bf0dd418e..6f01a086be 100644 --- a/ql/src/test/results/clientpositive/llap/topnkey_windowing.q.out +++ b/ql/src/test/results/clientpositive/llap/topnkey_windowing.q.out @@ -428,30 +428,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"tw_code":"true","tw_value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns tw_code,tw_value - columns.comments columns.types string:double #### A masked pattern was here #### name default.topnkey_windowing - numFiles 1 - numRows 26 - rawDataSize 176 - serialization.ddl struct topnkey_windowing { string tw_code, double tw_value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 202 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"tw_code":"true","tw_value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns tw_code,tw_value @@ -459,14 +449,8 @@ STAGE PLANS: columns.types string:double #### A masked pattern was here #### name default.topnkey_windowing - numFiles 1 - numRows 26 - rawDataSize 176 - serialization.ddl struct topnkey_windowing { string tw_code, double tw_value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 202 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.topnkey_windowing name: default.topnkey_windowing @@ -633,30 +617,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"tw_code":"true","tw_value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns tw_code,tw_value - columns.comments columns.types string:double #### A masked pattern was here #### name default.topnkey_windowing - numFiles 1 - numRows 26 - rawDataSize 176 - serialization.ddl struct 
topnkey_windowing { string tw_code, double tw_value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 202 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"tw_code":"true","tw_value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns tw_code,tw_value @@ -664,14 +638,8 @@ STAGE PLANS: columns.types string:double #### A masked pattern was here #### name default.topnkey_windowing - numFiles 1 - numRows 26 - rawDataSize 176 - serialization.ddl struct topnkey_windowing { string tw_code, double tw_value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 202 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.topnkey_windowing name: default.topnkey_windowing diff --git a/ql/src/test/results/clientpositive/llap/vectorization_0.q.out b/ql/src/test/results/clientpositive/llap/vectorization_0.q.out index 2c00a799d6..ad6b424ce7 100644 --- a/ql/src/test/results/clientpositive/llap/vectorization_0.q.out +++ b/ql/src/test/results/clientpositive/llap/vectorization_0.q.out @@ -1323,30 +1323,20 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cbigint":"true","cboolean1":"true","cboolean2":"true","cdouble":"true","cfloat":"true","cint":"true","csmallint":"true","cstring1":"true","cstring2":"true","ctimestamp1":"true","ctimestamp2":"true","ctinyint":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2 - columns.comments columns.types tinyint:smallint:int:bigint:float:double:string:string:timestamp:timestamp:boolean:boolean #### A masked pattern was here #### name default.alltypesorc - numFiles 1 - numRows 12288 - rawDataSize 2907994 - serialization.ddl struct alltypesorc { byte ctinyint, i16 csmallint, i32 cint, i64 cbigint, float cfloat, double cdouble, string cstring1, string cstring2, timestamp ctimestamp1, timestamp ctimestamp2, bool cboolean1, bool cboolean2} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 295616 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cbigint":"true","cboolean1":"true","cboolean2":"true","cdouble":"true","cfloat":"true","cint":"true","csmallint":"true","cstring1":"true","cstring2":"true","ctimestamp1":"true","ctimestamp2":"true","ctinyint":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2 @@ -1354,14 +1344,8 @@ STAGE PLANS: columns.types tinyint:smallint:int:bigint:float:double:string:string:timestamp:timestamp:boolean:boolean #### A masked pattern was here #### name default.alltypesorc - numFiles 1 - numRows 12288 - rawDataSize 
2907994 - serialization.ddl struct alltypesorc { byte ctinyint, i16 csmallint, i32 cint, i64 cbigint, float cfloat, double cdouble, string cstring1, string cstring2, timestamp ctimestamp1, timestamp ctimestamp2, bool cboolean1, bool cboolean2} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 295616 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.alltypesorc name: default.alltypesorc @@ -30158,30 +30142,20 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cbigint":"true","cboolean1":"true","cboolean2":"true","cdouble":"true","cfloat":"true","cint":"true","csmallint":"true","cstring1":"true","cstring2":"true","ctimestamp1":"true","ctimestamp2":"true","ctinyint":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2 - columns.comments columns.types tinyint:smallint:int:bigint:float:double:string:string:timestamp:timestamp:boolean:boolean #### A masked pattern was here #### name default.alltypesorc - numFiles 1 - numRows 12288 - rawDataSize 2907994 - serialization.ddl struct alltypesorc { byte ctinyint, i16 csmallint, i32 cint, i64 cbigint, float cfloat, double cdouble, string cstring1, string cstring2, timestamp ctimestamp1, timestamp ctimestamp2, bool cboolean1, bool cboolean2} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 295616 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cbigint":"true","cboolean1":"true","cboolean2":"true","cdouble":"true","cfloat":"true","cint":"true","csmallint":"true","cstring1":"true","cstring2":"true","ctimestamp1":"true","ctimestamp2":"true","ctinyint":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2 @@ -30189,14 +30163,8 @@ STAGE PLANS: columns.types tinyint:smallint:int:bigint:float:double:string:string:timestamp:timestamp:boolean:boolean #### A masked pattern was here #### name default.alltypesorc - numFiles 1 - numRows 12288 - rawDataSize 2907994 - serialization.ddl struct alltypesorc { byte ctinyint, i16 csmallint, i32 cint, i64 cbigint, float cfloat, double cdouble, string cstring1, string cstring2, timestamp ctimestamp1, timestamp ctimestamp2, bool cboolean1, bool cboolean2} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 295616 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.alltypesorc name: default.alltypesorc @@ -30285,30 +30253,20 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"cbigint":"true","cboolean1":"true","cboolean2":"true","cdouble":"true","cfloat":"true","cint":"true","csmallint":"true","cstring1":"true","cstring2":"true","ctimestamp1":"true","ctimestamp2":"true","ctinyint":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2 - columns.comments columns.types tinyint:smallint:int:bigint:float:double:string:string:timestamp:timestamp:boolean:boolean #### A masked pattern was here #### name default.alltypesorc - numFiles 1 - numRows 12288 - rawDataSize 2907994 - serialization.ddl struct alltypesorc { byte ctinyint, i16 csmallint, i32 cint, i64 cbigint, float cfloat, double cdouble, string cstring1, string cstring2, timestamp ctimestamp1, timestamp ctimestamp2, bool cboolean1, bool cboolean2} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 295616 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cbigint":"true","cboolean1":"true","cboolean2":"true","cdouble":"true","cfloat":"true","cint":"true","csmallint":"true","cstring1":"true","cstring2":"true","ctimestamp1":"true","ctimestamp2":"true","ctinyint":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2 @@ -30316,14 +30274,8 @@ STAGE PLANS: columns.types tinyint:smallint:int:bigint:float:double:string:string:timestamp:timestamp:boolean:boolean #### A masked pattern was here #### name default.alltypesorc - numFiles 1 - numRows 12288 - rawDataSize 2907994 - serialization.ddl struct alltypesorc { byte ctinyint, i16 csmallint, i32 cint, i64 cbigint, float cfloat, double cdouble, string cstring1, string cstring2, timestamp ctimestamp1, timestamp ctimestamp2, bool cboolean1, bool cboolean2} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 295616 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.alltypesorc name: default.alltypesorc @@ -30412,30 +30364,20 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cbigint":"true","cboolean1":"true","cboolean2":"true","cdouble":"true","cfloat":"true","cint":"true","csmallint":"true","cstring1":"true","cstring2":"true","ctimestamp1":"true","ctimestamp2":"true","ctinyint":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2 - columns.comments columns.types tinyint:smallint:int:bigint:float:double:string:string:timestamp:timestamp:boolean:boolean #### A masked pattern was here #### name default.alltypesorc - numFiles 1 - numRows 12288 - rawDataSize 2907994 - serialization.ddl struct alltypesorc { byte ctinyint, i16 csmallint, i32 cint, i64 cbigint, float cfloat, double cdouble, string cstring1, string cstring2, timestamp ctimestamp1, timestamp ctimestamp2, bool cboolean1, bool cboolean2} serialization.format 1 
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 295616 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cbigint":"true","cboolean1":"true","cboolean2":"true","cdouble":"true","cfloat":"true","cint":"true","csmallint":"true","cstring1":"true","cstring2":"true","ctimestamp1":"true","ctimestamp2":"true","ctinyint":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2 @@ -30443,14 +30385,8 @@ STAGE PLANS: columns.types tinyint:smallint:int:bigint:float:double:string:string:timestamp:timestamp:boolean:boolean #### A masked pattern was here #### name default.alltypesorc - numFiles 1 - numRows 12288 - rawDataSize 2907994 - serialization.ddl struct alltypesorc { byte ctinyint, i16 csmallint, i32 cint, i64 cbigint, float cfloat, double cdouble, string cstring1, string cstring2, timestamp ctimestamp1, timestamp ctimestamp2, bool cboolean1, bool cboolean2} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 295616 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.alltypesorc name: default.alltypesorc @@ -30533,30 +30469,20 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cbigint":"true","cboolean1":"true","cboolean2":"true","cdouble":"true","cfloat":"true","cint":"true","csmallint":"true","cstring1":"true","cstring2":"true","ctimestamp1":"true","ctimestamp2":"true","ctinyint":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2 - columns.comments columns.types tinyint:smallint:int:bigint:float:double:string:string:timestamp:timestamp:boolean:boolean #### A masked pattern was here #### name default.alltypesorc - numFiles 1 - numRows 12288 - rawDataSize 2907994 - serialization.ddl struct alltypesorc { byte ctinyint, i16 csmallint, i32 cint, i64 cbigint, float cfloat, double cdouble, string cstring1, string cstring2, timestamp ctimestamp1, timestamp ctimestamp2, bool cboolean1, bool cboolean2} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 295616 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"cbigint":"true","cboolean1":"true","cboolean2":"true","cdouble":"true","cfloat":"true","cint":"true","csmallint":"true","cstring1":"true","cstring2":"true","ctimestamp1":"true","ctimestamp2":"true","ctinyint":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2 @@ -30564,14 +30490,8 @@ STAGE PLANS: columns.types tinyint:smallint:int:bigint:float:double:string:string:timestamp:timestamp:boolean:boolean #### A masked pattern 
was here #### name default.alltypesorc - numFiles 1 - numRows 12288 - rawDataSize 2907994 - serialization.ddl struct alltypesorc { byte ctinyint, i16 csmallint, i32 cint, i64 cbigint, float cfloat, double cdouble, string cstring1, string cstring2, timestamp ctimestamp1, timestamp ctimestamp2, bool cboolean1, bool cboolean2} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 295616 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.alltypesorc name: default.alltypesorc diff --git a/ql/src/test/results/clientpositive/regexp_extract.q.out b/ql/src/test/results/clientpositive/regexp_extract.q.out index 95f7c22bc9..bf4628691e 100644 --- a/ql/src/test/results/clientpositive/regexp_extract.q.out +++ b/ql/src/test/results/clientpositive/regexp_extract.q.out @@ -75,30 +75,24 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src numFiles 1 numRows 500 rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -109,11 +103,9 @@ STAGE PLANS: numFiles 1 numRows 500 rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -337,30 +329,24 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src numFiles 1 numRows 500 rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -371,11 +357,9 @@ STAGE PLANS: numFiles 1 numRows 500 rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src diff --git a/ql/src/test/results/clientpositive/serde_user_properties.q.out b/ql/src/test/results/clientpositive/serde_user_properties.q.out index ac2b2ee6c9..7b7bd291ec 100644 --- a/ql/src/test/results/clientpositive/serde_user_properties.q.out +++ b/ql/src/test/results/clientpositive/serde_user_properties.q.out @@ -116,30 +116,24 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src numFiles 1 numRows 500 rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -150,11 +144,9 @@ STAGE PLANS: numFiles 1 numRows 500 rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -259,31 +251,25 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src numFiles 1 numRows 500 rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### user.defined.key some.value serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -294,11 +280,9 @@ STAGE PLANS: numFiles 1 numRows 500 rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### user.defined.key some.value serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -404,31 +388,25 @@ STAGE PLANS: input format: 
org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src numFiles 1 numRows 500 rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### user.defined.key some.value serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -439,11 +417,9 @@ STAGE PLANS: numFiles 1 numRows 500 rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### user.defined.key some.value serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src diff --git a/ql/src/test/results/clientpositive/sort_merge_join_desc_5.q.out b/ql/src/test/results/clientpositive/sort_merge_join_desc_5.q.out index 1142daba9c..9fb50bd42e 100644 --- a/ql/src/test/results/clientpositive/sort_merge_join_desc_5.q.out +++ b/ql/src/test/results/clientpositive/sort_merge_join_desc_5.q.out @@ -118,12 +118,10 @@ STAGE PLANS: partition values: part 1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 1 bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1_n7 @@ -132,11 +130,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 5312 - serialization.ddl struct srcbucket_mapjoin_part_1_n7 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -154,10 +150,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_1_n7 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_1_n7 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1_n7 name: default.srcbucket_mapjoin_part_1_n7 diff --git a/ql/src/test/results/clientpositive/sort_merge_join_desc_6.q.out b/ql/src/test/results/clientpositive/sort_merge_join_desc_6.q.out index 17f3b0b360..9c474ea80e 100644 --- a/ql/src/test/results/clientpositive/sort_merge_join_desc_6.q.out +++ b/ql/src/test/results/clientpositive/sort_merge_join_desc_6.q.out @@ -85,12 +85,10 @@ STAGE PLANS: partition values: part 1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 
bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n8 @@ -99,11 +97,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 5312 - serialization.ddl struct srcbucket_mapjoin_part_2_n8 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -120,10 +116,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n8 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n8 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n8 name: default.srcbucket_mapjoin_part_2_n8 @@ -201,12 +195,10 @@ STAGE PLANS: partition values: part 1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1_n3 @@ -215,11 +207,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 5312 - serialization.ddl struct srcbucket_mapjoin_part_1_n3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -237,10 +227,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_1_n3 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_1_n3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1_n3 name: default.srcbucket_mapjoin_part_1_n3 diff --git a/ql/src/test/results/clientpositive/sort_merge_join_desc_7.q.out b/ql/src/test/results/clientpositive/sort_merge_join_desc_7.q.out index 51bb46b399..1b5351025e 100644 --- a/ql/src/test/results/clientpositive/sort_merge_join_desc_7.q.out +++ b/ql/src/test/results/clientpositive/sort_merge_join_desc_7.q.out @@ -125,12 +125,10 @@ STAGE PLANS: partition values: part 1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key,value column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n2 @@ -139,11 +137,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 5312 - serialization.ddl struct srcbucket_mapjoin_part_2_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -160,10 +156,8 @@ STAGE 
PLANS: name default.srcbucket_mapjoin_part_2_n2 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n2 name: default.srcbucket_mapjoin_part_2_n2 @@ -173,12 +167,10 @@ STAGE PLANS: partition values: part 2 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key,value column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2_n2 @@ -187,11 +179,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 5312 - serialization.ddl struct srcbucket_mapjoin_part_2_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -208,10 +198,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_2_n2 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_2_n2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2_n2 name: default.srcbucket_mapjoin_part_2_n2 @@ -281,12 +269,10 @@ STAGE PLANS: partition values: part 1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key,value column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1_n0 @@ -295,11 +281,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 5312 - serialization.ddl struct srcbucket_mapjoin_part_1_n0 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -316,10 +300,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_1_n0 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_1_n0 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1_n0 name: default.srcbucket_mapjoin_part_1_n0 @@ -331,12 +313,10 @@ STAGE PLANS: partition values: part 2 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count 2 bucket_field_name key,value column.name.delimiter , columns key,value - columns.comments columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1_n0 @@ -345,11 +325,9 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 5312 - serialization.ddl struct 
srcbucket_mapjoin_part_1_n0 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -366,10 +344,8 @@ STAGE PLANS: name default.srcbucket_mapjoin_part_1_n0 partition_columns part partition_columns.types string - serialization.ddl struct srcbucket_mapjoin_part_1_n0 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1_n0 name: default.srcbucket_mapjoin_part_1_n0 diff --git a/ql/src/test/results/clientpositive/temp_table_partition_pruning.q.out b/ql/src/test/results/clientpositive/temp_table_partition_pruning.q.out index f6fdd61928..5578d0145e 100644 --- a/ql/src/test/results/clientpositive/temp_table_partition_pruning.q.out +++ b/ql/src/test/results/clientpositive/temp_table_partition_pruning.q.out @@ -172,11 +172,8 @@ STAGE PLANS: partition values: dt 2001-01-01 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"customer":"true"}} - bucket_count -1 column.name.delimiter , columns customer - columns.comments columns.types int #### A masked pattern was here #### name default.daysales_temp @@ -185,7 +182,6 @@ STAGE PLANS: partition_columns dt partition_columns.types string rawDataSize 1 - serialization.ddl struct daysales_temp { i32 customer} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2 @@ -194,8 +190,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"customer":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns customer @@ -208,7 +202,6 @@ STAGE PLANS: partition_columns dt partition_columns.types string rawDataSize 0 - serialization.ddl struct daysales_temp { i32 customer} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 @@ -223,11 +216,8 @@ STAGE PLANS: partition values: dt 2001-01-03 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"customer":"true"}} - bucket_count -1 column.name.delimiter , columns customer - columns.comments columns.types int #### A masked pattern was here #### name default.daysales_temp @@ -236,7 +226,6 @@ STAGE PLANS: partition_columns dt partition_columns.types string rawDataSize 1 - serialization.ddl struct daysales_temp { i32 customer} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2 @@ -245,8 +234,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"customer":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns customer @@ -259,7 +246,6 @@ STAGE PLANS: partition_columns dt partition_columns.types string rawDataSize 0 - serialization.ddl struct daysales_temp { i32 customer} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 @@ -347,11 +333,8 @@ STAGE PLANS: partition values: dt 2001-01-01 
properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"customer":"true"}} - bucket_count -1 column.name.delimiter , columns customer - columns.comments columns.types int #### A masked pattern was here #### name default.daysales_temp @@ -360,7 +343,6 @@ STAGE PLANS: partition_columns dt partition_columns.types string rawDataSize 1 - serialization.ddl struct daysales_temp { i32 customer} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2 @@ -369,8 +351,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"customer":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns customer @@ -383,7 +363,6 @@ STAGE PLANS: partition_columns dt partition_columns.types string rawDataSize 0 - serialization.ddl struct daysales_temp { i32 customer} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 @@ -398,11 +377,8 @@ STAGE PLANS: partition values: dt 2001-01-03 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"customer":"true"}} - bucket_count -1 column.name.delimiter , columns customer - columns.comments columns.types int #### A masked pattern was here #### name default.daysales_temp @@ -411,7 +387,6 @@ STAGE PLANS: partition_columns dt partition_columns.types string rawDataSize 1 - serialization.ddl struct daysales_temp { i32 customer} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2 @@ -420,8 +395,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"customer":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns customer @@ -434,7 +407,6 @@ STAGE PLANS: partition_columns dt partition_columns.types string rawDataSize 0 - serialization.ddl struct daysales_temp { i32 customer} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 @@ -522,11 +494,8 @@ STAGE PLANS: partition values: dt 2001-01-01 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"customer":"true"}} - bucket_count -1 column.name.delimiter , columns customer - columns.comments columns.types int #### A masked pattern was here #### name default.daysales_temp @@ -535,7 +504,6 @@ STAGE PLANS: partition_columns dt partition_columns.types string rawDataSize 1 - serialization.ddl struct daysales_temp { i32 customer} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2 @@ -544,8 +512,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"customer":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns customer @@ -558,7 +524,6 @@ STAGE PLANS: partition_columns dt partition_columns.types string rawDataSize 0 - serialization.ddl struct daysales_temp { i32 customer} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 @@ -573,11 +538,8 @@ STAGE PLANS: partition values: dt 2001-01-03 properties: - 
COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"customer":"true"}} - bucket_count -1 column.name.delimiter , columns customer - columns.comments columns.types int #### A masked pattern was here #### name default.daysales_temp @@ -586,7 +548,6 @@ STAGE PLANS: partition_columns dt partition_columns.types string rawDataSize 1 - serialization.ddl struct daysales_temp { i32 customer} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2 @@ -595,8 +556,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"customer":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns customer @@ -609,7 +568,6 @@ STAGE PLANS: partition_columns dt partition_columns.types string rawDataSize 0 - serialization.ddl struct daysales_temp { i32 customer} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 diff --git a/ql/src/test/results/clientpositive/timestamp.q.out b/ql/src/test/results/clientpositive/timestamp.q.out index 90a46f58f4..5851bb01c8 100644 --- a/ql/src/test/results/clientpositive/timestamp.q.out +++ b/ql/src/test/results/clientpositive/timestamp.q.out @@ -164,30 +164,24 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src numFiles 1 numRows 500 rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -198,11 +192,9 @@ STAGE PLANS: numFiles 1 numRows 500 rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src diff --git a/ql/src/test/results/clientpositive/transform_ppr1.q.out b/ql/src/test/results/clientpositive/transform_ppr1.q.out index 25468bcd9c..83cfc34fde 100644 --- a/ql/src/test/results/clientpositive/transform_ppr1.q.out +++ b/ql/src/test/results/clientpositive/transform_ppr1.q.out @@ -87,11 +87,8 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart @@ -100,17 +97,14 @@ STAGE PLANS: partition_columns ds/hr partition_columns.types string:string rawDataSize 5312 - 
serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -120,10 +114,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -136,11 +128,8 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart @@ -149,17 +138,14 @@ STAGE PLANS: partition_columns ds/hr partition_columns.types string:string rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -169,10 +155,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -185,11 +169,8 @@ STAGE PLANS: ds 2008-04-09 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart @@ -198,17 +179,14 @@ STAGE PLANS: partition_columns ds/hr partition_columns.types string:string rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -218,10 +196,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -234,11 +210,8 @@ STAGE PLANS: ds 2008-04-09 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart @@ -247,17 +220,14 @@ STAGE PLANS: partition_columns ds/hr partition_columns.types string:string rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -267,10 +237,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart diff --git a/ql/src/test/results/clientpositive/transform_ppr2.q.out b/ql/src/test/results/clientpositive/transform_ppr2.q.out index 8aeb688513..aabf5be298 100644 --- a/ql/src/test/results/clientpositive/transform_ppr2.q.out +++ b/ql/src/test/results/clientpositive/transform_ppr2.q.out @@ -86,11 +86,8 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart @@ -99,17 +96,14 @@ STAGE PLANS: partition_columns ds/hr partition_columns.types string:string rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -119,10 +113,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -135,11 +127,8 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart @@ -148,17 +137,14 @@ STAGE PLANS: partition_columns ds/hr partition_columns.types string:string rawDataSize 5312 - 
serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -168,10 +154,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart diff --git a/ql/src/test/results/clientpositive/truncate_column_list_bucket.q.out b/ql/src/test/results/clientpositive/truncate_column_list_bucket.q.out index c8e40bd447..f2947bb164 100644 --- a/ql/src/test/results/clientpositive/truncate_column_list_bucket.q.out +++ b/ql/src/test/results/clientpositive/truncate_column_list_bucket.q.out @@ -119,10 +119,8 @@ STAGE PLANS: partition values: part 1 properties: - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.test_tab_n3 @@ -131,17 +129,14 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 4812 - serialization.ddl struct test_tab_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe totalSize 1761 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -151,10 +146,8 @@ STAGE PLANS: name default.test_tab_n3 partition_columns part partition_columns.types string - serialization.ddl struct test_tab_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.test_tab_n3 name: default.test_tab_n3 @@ -248,10 +241,8 @@ STAGE PLANS: partition values: part 1 properties: - bucket_count -1 column.name.delimiter , columns key,value - columns.comments columns.types string:string #### A masked pattern was here #### name default.test_tab_n3 @@ -260,17 +251,14 @@ STAGE PLANS: partition_columns part partition_columns.types string rawDataSize 4812 - serialization.ddl struct test_tab_n3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe totalSize 1761 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -280,10 +268,8 @@ STAGE PLANS: name default.test_tab_n3 partition_columns part partition_columns.types string - serialization.ddl struct test_tab_n3 { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.test_tab_n3 name: default.test_tab_n3 diff --git a/ql/src/test/results/clientpositive/udf_explode.q.out b/ql/src/test/results/clientpositive/udf_explode.q.out index 0143f3160b..b1d7f8bb55 100644 --- a/ql/src/test/results/clientpositive/udf_explode.q.out +++ b/ql/src/test/results/clientpositive/udf_explode.q.out @@ -95,30 +95,24 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src numFiles 1 numRows 500 rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -129,11 +123,9 @@ STAGE PLANS: numFiles 1 numRows 500 rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -296,30 +288,24 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src numFiles 1 numRows 500 rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -330,11 +316,9 @@ STAGE PLANS: numFiles 1 numRows 500 rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src diff --git a/ql/src/test/results/clientpositive/udtf_explode.q.out b/ql/src/test/results/clientpositive/udtf_explode.q.out index 1b941b87bb..6f6aef90ae 100644 --- 
a/ql/src/test/results/clientpositive/udtf_explode.q.out +++ b/ql/src/test/results/clientpositive/udtf_explode.q.out @@ -101,30 +101,24 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src numFiles 1 numRows 500 rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -135,11 +129,9 @@ STAGE PLANS: numFiles 1 numRows 500 rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src @@ -387,30 +379,24 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.src numFiles 1 numRows 500 rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -421,11 +407,9 @@ STAGE PLANS: numFiles 1 numRows 500 rawDataSize 5312 - serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src name: default.src diff --git a/ql/src/test/results/clientpositive/union22.q.out b/ql/src/test/results/clientpositive/union22.q.out index de36e44dfb..bcdfeb1fbe 100644 --- a/ql/src/test/results/clientpositive/union22.q.out +++ b/ql/src/test/results/clientpositive/union22.q.out @@ -118,11 +118,8 @@ STAGE PLANS: partition values: ds 1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"k0":"true","k1":"true","k2":"true","k3":"true","k4":"true","k5":"true"}} - bucket_count -1 column.name.delimiter , columns k0,k1,k2,k3,k4,k5 - 
columns.comments columns.types string:string:string:string:string:string #### A masked pattern was here #### name default.dst_union22_delta @@ -131,17 +128,14 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 16936 - serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 17436 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns k0,k1,k2,k3,k4,k5 @@ -151,10 +145,8 @@ STAGE PLANS: name default.dst_union22_delta partition_columns ds partition_columns.types string - serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22_delta name: default.dst_union22_delta @@ -251,11 +243,8 @@ STAGE PLANS: partition values: ds 1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"k1":"true","k2":"true","k3":"true","k4":"true"}} - bucket_count -1 column.name.delimiter , columns k1,k2,k3,k4 - columns.comments columns.types string:string:string:string #### A masked pattern was here #### name default.dst_union22 @@ -264,17 +253,14 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 11124 - serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 11624 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns k1,k2,k3,k4 @@ -284,10 +270,8 @@ STAGE PLANS: name default.dst_union22 partition_columns ds partition_columns.types string - serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22 name: default.dst_union22 @@ -299,11 +283,8 @@ STAGE PLANS: partition values: ds 1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"k0":"true","k1":"true","k2":"true","k3":"true","k4":"true","k5":"true"}} - bucket_count -1 column.name.delimiter , columns k0,k1,k2,k3,k4,k5 - columns.comments columns.types string:string:string:string:string:string #### A masked pattern was here #### name default.dst_union22_delta @@ -312,17 +293,14 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 16936 - serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 17436 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: 
org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns k0,k1,k2,k3,k4,k5 @@ -332,10 +310,8 @@ STAGE PLANS: name default.dst_union22_delta partition_columns ds partition_columns.types string - serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22_delta name: default.dst_union22_delta @@ -373,7 +349,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns k1,k2,k3,k4 @@ -383,10 +358,8 @@ STAGE PLANS: name default.dst_union22 partition_columns ds partition_columns.types string - serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22 TotalFiles: 1 @@ -431,7 +404,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns k1,k2,k3,k4 @@ -441,10 +413,8 @@ STAGE PLANS: name default.dst_union22 partition_columns ds partition_columns.types string - serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22 TotalFiles: 1 @@ -505,11 +475,8 @@ STAGE PLANS: partition values: ds 1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"k0":"true","k1":"true","k2":"true","k3":"true","k4":"true","k5":"true"}} - bucket_count -1 column.name.delimiter , columns k0,k1,k2,k3,k4,k5 - columns.comments columns.types string:string:string:string:string:string #### A masked pattern was here #### name default.dst_union22_delta @@ -518,17 +485,14 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 16936 - serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 17436 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns k0,k1,k2,k3,k4,k5 @@ -538,10 +502,8 @@ STAGE PLANS: name default.dst_union22_delta partition_columns ds partition_columns.types string - serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: 
default.dst_union22_delta name: default.dst_union22_delta @@ -596,7 +558,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns k1,k2,k3,k4 @@ -606,10 +567,8 @@ STAGE PLANS: name default.dst_union22 partition_columns ds partition_columns.types string - serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22 @@ -685,11 +644,8 @@ STAGE PLANS: partition values: ds 1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"k1":"true","k2":"true","k3":"true","k4":"true"}} - bucket_count -1 column.name.delimiter , columns k1,k2,k3,k4 - columns.comments columns.types string:string:string:string #### A masked pattern was here #### name default.dst_union22 @@ -698,17 +654,14 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 11124 - serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 11624 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns k1,k2,k3,k4 @@ -718,10 +671,8 @@ STAGE PLANS: name default.dst_union22 partition_columns ds partition_columns.types string - serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22 name: default.dst_union22 @@ -733,11 +684,8 @@ STAGE PLANS: partition values: ds 1 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"k0":"true","k1":"true","k2":"true","k3":"true","k4":"true","k5":"true"}} - bucket_count -1 column.name.delimiter , columns k0,k1,k2,k3,k4,k5 - columns.comments columns.types string:string:string:string:string:string #### A masked pattern was here #### name default.dst_union22_delta @@ -746,17 +694,14 @@ STAGE PLANS: partition_columns ds partition_columns.types string rawDataSize 16936 - serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 17436 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns k0,k1,k2,k3,k4,k5 @@ -766,10 +711,8 @@ STAGE PLANS: name default.dst_union22_delta partition_columns ds partition_columns.types string - serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was 
here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22_delta name: default.dst_union22_delta diff --git a/ql/src/test/results/clientpositive/union24.q.out b/ql/src/test/results/clientpositive/union24.q.out index 32a86e7f02..4ccb8a75a2 100644 --- a/ql/src/test/results/clientpositive/union24.q.out +++ b/ql/src/test/results/clientpositive/union24.q.out @@ -140,30 +140,24 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"count":"true","key":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,count - columns.comments columns.types string:bigint #### A masked pattern was here #### name default.src5_n3 numFiles 1 numRows 309 rawDataSize 1482 - serialization.ddl struct src5_n3 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"count":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,count @@ -174,11 +168,9 @@ STAGE PLANS: numFiles 1 numRows 309 rawDataSize 1482 - serialization.ddl struct src5_n3 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src5_n3 name: default.src5_n3 @@ -391,30 +383,24 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"count":"true","key":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,count - columns.comments columns.types string:bigint #### A masked pattern was here #### name default.src2_n6 numFiles 1 numRows 309 rawDataSize 1482 - serialization.ddl struct src2_n6 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"count":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,count @@ -425,11 +411,9 @@ STAGE PLANS: numFiles 1 numRows 309 rawDataSize 1482 - serialization.ddl struct src2_n6 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src2_n6 name: default.src2_n6 @@ -439,30 +423,24 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"count":"true","key":"true"}} 
bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,count - columns.comments columns.types string:bigint #### A masked pattern was here #### name default.src3_n2 numFiles 1 numRows 309 rawDataSize 1482 - serialization.ddl struct src3_n2 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"count":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,count @@ -473,11 +451,9 @@ STAGE PLANS: numFiles 1 numRows 309 rawDataSize 1482 - serialization.ddl struct src3_n2 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src3_n2 name: default.src3_n2 @@ -487,30 +463,24 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"count":"true","key":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,count - columns.comments columns.types string:bigint #### A masked pattern was here #### name default.src4_n0 numFiles 1 numRows 309 rawDataSize 1482 - serialization.ddl struct src4_n0 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"count":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,count @@ -521,11 +491,9 @@ STAGE PLANS: numFiles 1 numRows 309 rawDataSize 1482 - serialization.ddl struct src4_n0 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src4_n0 name: default.src4_n0 @@ -703,30 +671,24 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"count":"true","key":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,count - columns.comments columns.types string:bigint #### A masked pattern was here #### name default.src4_n0 numFiles 1 numRows 309 rawDataSize 1482 - serialization.ddl struct src4_n0 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"count":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,count @@ -737,11 +699,9 @@ STAGE PLANS: numFiles 1 numRows 309 rawDataSize 1482 - serialization.ddl struct src4_n0 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src4_n0 name: default.src4_n0 @@ -751,30 +711,24 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"count":"true","key":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,count - columns.comments columns.types string:bigint #### A masked pattern was here #### name default.src5_n3 numFiles 1 numRows 309 rawDataSize 1482 - serialization.ddl struct src5_n3 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"count":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,count @@ -785,11 +739,9 @@ STAGE PLANS: numFiles 1 numRows 309 rawDataSize 1482 - serialization.ddl struct src5_n3 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src5_n3 name: default.src5_n3 @@ -970,30 +922,24 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"count":"true","key":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,count - columns.comments columns.types string:bigint #### A masked pattern was here #### name default.src2_n6 numFiles 1 numRows 309 rawDataSize 1482 - serialization.ddl struct src2_n6 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"count":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,count @@ -1004,11 +950,9 @@ STAGE PLANS: numFiles 1 numRows 309 rawDataSize 1482 - serialization.ddl struct src2_n6 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src2_n6 name: default.src2_n6 @@ -1018,30 +962,24 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"count":"true","key":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,count - columns.comments columns.types string:bigint #### A masked pattern was here #### name default.src3_n2 numFiles 1 numRows 309 rawDataSize 1482 - serialization.ddl struct src3_n2 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"count":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,count @@ -1052,11 +990,9 @@ STAGE PLANS: numFiles 1 numRows 309 rawDataSize 1482 - serialization.ddl struct src3_n2 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src3_n2 name: default.src3_n2 @@ -1224,30 +1160,24 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"count":"true","key":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,count - columns.comments columns.types string:bigint #### A masked pattern was here #### name default.src4_n0 numFiles 1 numRows 309 rawDataSize 1482 - serialization.ddl struct src4_n0 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"count":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,count @@ -1258,11 +1188,9 @@ STAGE PLANS: numFiles 1 numRows 309 rawDataSize 1482 - serialization.ddl struct src4_n0 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src4_n0 name: default.src4_n0 @@ -1272,30 +1200,24 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"count":"true","key":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,count - columns.comments columns.types string:bigint #### A masked pattern was here #### name default.src5_n3 numFiles 1 numRows 309 rawDataSize 1482 - serialization.ddl struct src5_n3 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input 
format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"count":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,count @@ -1306,11 +1228,9 @@ STAGE PLANS: numFiles 1 numRows 309 rawDataSize 1482 - serialization.ddl struct src5_n3 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src5_n3 name: default.src5_n3 @@ -1566,30 +1486,24 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"count":"true","key":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,count - columns.comments columns.types string:bigint #### A masked pattern was here #### name default.src2_n6 numFiles 1 numRows 309 rawDataSize 1482 - serialization.ddl struct src2_n6 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"count":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,count @@ -1600,11 +1514,9 @@ STAGE PLANS: numFiles 1 numRows 309 rawDataSize 1482 - serialization.ddl struct src2_n6 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src2_n6 name: default.src2_n6 @@ -1614,30 +1526,24 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"count":"true","key":"true"}} bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,count - columns.comments columns.types string:bigint #### A masked pattern was here #### name default.src3_n2 numFiles 1 numRows 309 rawDataSize 1482 - serialization.ddl struct src3_n2 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"count":"true","key":"true"}} - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,count @@ -1648,11 +1554,9 @@ STAGE PLANS: numFiles 1 numRows 309 rawDataSize 1482 - serialization.ddl struct src3_n2 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 1791 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: 
default.src3_n2 name: default.src3_n2 diff --git a/ql/src/test/results/clientpositive/union_ppr.q.out b/ql/src/test/results/clientpositive/union_ppr.q.out index b841994373..d30f972fcf 100644 --- a/ql/src/test/results/clientpositive/union_ppr.q.out +++ b/ql/src/test/results/clientpositive/union_ppr.q.out @@ -100,11 +100,8 @@ STAGE PLANS: ds 2008-04-08 hr 11 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart @@ -113,17 +110,14 @@ STAGE PLANS: partition_columns ds/hr partition_columns.types string:string rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -133,10 +127,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart @@ -149,11 +141,8 @@ STAGE PLANS: ds 2008-04-08 hr 12 properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} - bucket_count -1 column.name.delimiter , columns key,value - columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart @@ -162,17 +151,14 @@ STAGE PLANS: partition_columns ds/hr partition_columns.types string:string rawDataSize 5312 - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - bucket_count -1 bucketing_version 2 column.name.delimiter , columns key,value @@ -182,10 +168,8 @@ STAGE PLANS: name default.srcpart partition_columns ds/hr partition_columns.types string:string - serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java index 18f689ebf4..a70a8fa403 100644 --- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java +++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java @@ -43,6 +43,7 @@ import 
org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.StatsSetupConst;
 import org.apache.hadoop.hive.metastore.ColumnType;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.Warehouse;
@@ -661,9 +662,12 @@ public static Properties getSchemaWithoutCols(StorageDescriptor sd,
           org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_LOCATION, sd.getLocation());
     }
 
-    schema.setProperty(
-        org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.BUCKET_COUNT, Integer
-            .toString(sd.getNumBuckets()));
+    int bucket_cnt = sd.getNumBuckets();
+    if (bucket_cnt > 0) {
+      schema.setProperty(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.BUCKET_COUNT,
+          Integer.toString(bucket_cnt));
+    }
+
     if (sd.getBucketCols() != null && sd.getBucketCols().size() > 0) {
       schema.setProperty(
           org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.BUCKET_FIELD_NAME,
@@ -679,10 +683,6 @@ public static Properties getSchemaWithoutCols(StorageDescriptor sd,
       }
     }
 
-    if (sd.getCols() != null) {
-      schema.setProperty(ColumnType.SERIALIZATION_DDL, getDDLFromFieldSchema(tableName, sd.getCols()));
-    }
-
     String partString = StringUtils.EMPTY;
     String partStringSep = StringUtils.EMPTY;
     String partTypesString = StringUtils.EMPTY;
@@ -711,7 +711,13 @@ public static Properties getSchemaWithoutCols(StorageDescriptor sd,
     if (parameters != null) {
       for (Map.Entry e : parameters.entrySet()) {
         // add non-null parameters to the schema
-        if ( e.getValue() != null) {
+        String key = e.getKey();
+        if (!StatsSetupConst.COLUMN_STATS_ACCURATE.equals(key) &&
+            !hive_metastoreConstants.DDL_TIME.equals(key) &&
+            !StatsSetupConst.TOTAL_SIZE.equals(key) &&
+            !StatsSetupConst.RAW_DATA_SIZE.equals(key) &&
+            !StatsSetupConst.NUM_FILES.equals(key) &&
+            !StatsSetupConst.ROW_COUNT.equals(key) && e.getValue() != null) {
           schema.setProperty(e.getKey(), e.getValue());
         }
       }
-- 
2.17.2 (Apple Git-113)
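
Most of the golden-file churn above is the visible effect of the MetaStoreUtils change at the end of the patch: the bucket count is emitted only when the storage descriptor actually has buckets, serialization.ddl is no longer generated, and several statistics/DDL-time parameters are skipped when table or partition parameters are copied into the schema, so every descriptor serialized into a map task carries fewer properties. Below is a minimal, self-contained sketch of that filtering idea, written against plain java.util types rather than Hive's API; the class name SchemaPropertyFilter, the SKIPPED_KEYS set and the filter() helper are invented for the example, and the literal key strings are the values the corresponding Hive constants are generally understood to resolve to.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Map;
import java.util.Properties;
import java.util.Set;

// Illustration only, not Hive code: copy everything except stats/DDL
// bookkeeping keys, mirroring the parameter loop in the patch above.
public class SchemaPropertyFilter {

  // Literal key strings (assumed); in Hive these come from StatsSetupConst
  // and hive_metastoreConstants.DDL_TIME.
  private static final Set<String> SKIPPED_KEYS = new HashSet<>(Arrays.asList(
      "COLUMN_STATS_ACCURATE",   // StatsSetupConst.COLUMN_STATS_ACCURATE
      "transient_lastDdlTime",   // hive_metastoreConstants.DDL_TIME
      "totalSize",               // StatsSetupConst.TOTAL_SIZE
      "rawDataSize",             // StatsSetupConst.RAW_DATA_SIZE
      "numFiles",                // StatsSetupConst.NUM_FILES
      "numRows"));               // StatsSetupConst.ROW_COUNT

  // Copy only the non-null parameters that matter at execution time.
  public static Properties filter(Map<String, String> parameters) {
    Properties schema = new Properties();
    for (Map.Entry<String, String> e : parameters.entrySet()) {
      if (e.getValue() != null && !SKIPPED_KEYS.contains(e.getKey())) {
        schema.setProperty(e.getKey(), e.getValue());
      }
    }
    return schema;
  }

  public static void main(String[] args) {
    Map<String, String> params = Map.of(
        "columns", "key,value",
        "columns.types", "string:string",
        "serialization.lib", "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe",
        "numRows", "500",
        "totalSize", "5812");
    // Prints only the serde-relevant entries; numRows and totalSize are dropped.
    filter(params).forEach((k, v) -> System.out.println(k + "=" + v));
  }
}

The point of the filter is that serde-critical keys (columns, columns.types, serialization.lib, serialization.format) survive, while keys that only matter for statistics or DDL bookkeeping are dropped before the schema travels with the plan, which is consistent with the properties that remain in the updated .q.out files.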